Columns: repo_id (string, length 5–115), size (int64, 590–5.01M), file_path (string, length 4–212), content (string, length 590–5.01M)
repo_id: marvin-hansen/iggy-streaming-system
size: 64,948
file_path: thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/fipsmodule/sha256-armv4.S
content:
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

#include <openssl/asm_base.h>

#if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__)
@ Copyright 2007-2016 The OpenSSL Project Authors. All Rights Reserved.
@
@ Licensed under the OpenSSL license (the "License"). You may not use
@ this file except in compliance with the License. You can obtain a copy
@ in the file LICENSE in the source distribution or at
@ https://www.openssl.org/source/license.html

@ ====================================================================
@ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
@ project. The module is, however, dual licensed under OpenSSL and
@ CRYPTOGAMS licenses depending on where you obtain it. For further
@ details see http://www.openssl.org/~appro/cryptogams/.
@
@ Permission to use under GPL terms is granted.
@ ====================================================================

@ SHA256 block procedure for ARMv4. May 2007.

@ Performance is ~2x better than gcc 3.4 generated code and in "abso-
@ lute" terms is ~2250 cycles per 64-byte block or ~35 cycles per
@ byte [on single-issue Xscale PXA250 core].

@ July 2010.
@
@ Rescheduling for dual-issue pipeline resulted in 22% improvement on
@ Cortex A8 core and ~20 cycles per processed byte.

@ February 2011.
@
@ Profiler-assisted and platform-specific optimization resulted in 16%
@ improvement on Cortex A8 core and ~15.4 cycles per processed byte.

@ September 2013.
@
@ Add NEON implementation. On Cortex A8 it was measured to process one
@ byte in 12.5 cycles or 23% faster than integer-only code. Snapdragon
@ S4 does it in 12.5 cycles too, but it's 50% faster than integer-only
@ code (meaning that latter performs sub-optimally, nothing was done
@ about it).

@ May 2014.
@
@ Add ARMv8 code path performing at 2.0 cpb on Apple A7.

#ifndef __KERNEL__
# include <openssl/arm_arch.h>
#else
# define __ARM_ARCH __LINUX_ARM_ARCH__
# define __ARM_MAX_ARCH__ 7
#endif

@ Silence ARMv8 deprecated IT instruction warnings. This file is used by both
@ ARMv7 and ARMv8 processors. It does have ARMv8-only code, but those
@ instructions are manually-encoded. (See unsha256.)
.arch armv7-a .text #if defined(__thumb2__) .syntax unified .thumb #else .code 32 #endif .type K256,%object .align 5 K256: .word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .size K256,.-K256 .word 0 @ terminator .align 5 .globl sha256_block_data_order_nohw .hidden sha256_block_data_order_nohw .type sha256_block_data_order_nohw,%function sha256_block_data_order_nohw: add r2,r1,r2,lsl#6 @ len to point at the end of inp stmdb sp!,{r0,r1,r2,r4-r11,lr} ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} adr r14,K256 sub sp,sp,#16*4 @ alloca(X[16]) .Loop: # if __ARM_ARCH>=7 ldr r2,[r1],#4 # else ldrb r2,[r1,#3] # endif eor r3,r5,r6 @ magic eor r12,r12,r12 #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 0 # if 0==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r8,r8,ror#5 add r4,r4,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r8,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 0 add r4,r4,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 0==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r8,r8,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r8,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r11,r11,r2 @ h+=X[i] str r2,[sp,#0*4] eor r2,r9,r10 add r11,r11,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r8 add r11,r11,r12 @ h+=K256[i] eor r2,r2,r10 @ Ch(e,f,g) eor r0,r4,r4,ror#11 add r11,r11,r2 @ h+=Ch(e,f,g) #if 0==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 0<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r4,r5 @ a^b, b^c in next round #else ldr r2,[sp,#2*4] @ from future BODY_16_xx eor r12,r4,r5 @ a^b, b^c in next round ldr r1,[sp,#15*4] @ from future BODY_16_xx #endif eor r0,r0,r4,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r7,r7,r11 @ d+=h eor r3,r3,r5 @ Maj(a,b,c) add r11,r11,r0,ror#2 @ h+=Sigma0(a) @ add r11,r11,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 1 # if 1==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r7,r7,ror#5 add r11,r11,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r7,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 1 add r11,r11,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 1==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r7,r7,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r7,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r10,r10,r2 @ h+=X[i] str r2,[sp,#1*4] eor r2,r8,r9 add r10,r10,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r7 add r10,r10,r3 @ h+=K256[i] eor r2,r2,r9 @ Ch(e,f,g) eor r0,r11,r11,ror#11 add r10,r10,r2 @ h+=Ch(e,f,g) #if 1==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 1<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r11,r4 @ a^b, b^c in next round #else ldr r2,[sp,#3*4] @ from future BODY_16_xx eor r3,r11,r4 @ a^b, b^c in next round ldr r1,[sp,#0*4] @ from future BODY_16_xx #endif eor r0,r0,r11,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r6,r6,r10 @ d+=h eor r12,r12,r4 @ Maj(a,b,c) add r10,r10,r0,ror#2 @ h+=Sigma0(a) @ add r10,r10,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 2 # if 2==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r6,r6,ror#5 add r10,r10,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r6,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 2 add r10,r10,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 2==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r6,r6,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r6,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r9,r9,r2 @ h+=X[i] str r2,[sp,#2*4] eor r2,r7,r8 add r9,r9,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r6 add r9,r9,r12 @ h+=K256[i] eor r2,r2,r8 @ Ch(e,f,g) eor r0,r10,r10,ror#11 add r9,r9,r2 @ h+=Ch(e,f,g) #if 2==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 2<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r10,r11 @ a^b, b^c in next round #else ldr r2,[sp,#4*4] @ from future BODY_16_xx eor r12,r10,r11 @ a^b, b^c in next round ldr r1,[sp,#1*4] @ from future BODY_16_xx #endif eor r0,r0,r10,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r5,r5,r9 @ d+=h eor r3,r3,r11 @ Maj(a,b,c) add r9,r9,r0,ror#2 @ h+=Sigma0(a) @ add r9,r9,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 3 # if 3==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r5,r5,ror#5 add r9,r9,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r5,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 3 add r9,r9,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 3==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r5,r5,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r5,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r8,r8,r2 @ h+=X[i] str r2,[sp,#3*4] eor r2,r6,r7 add r8,r8,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r5 add r8,r8,r3 @ h+=K256[i] eor r2,r2,r7 @ Ch(e,f,g) eor r0,r9,r9,ror#11 add r8,r8,r2 @ h+=Ch(e,f,g) #if 3==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 3<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r9,r10 @ a^b, b^c in next round #else ldr r2,[sp,#5*4] @ from future BODY_16_xx eor r3,r9,r10 @ a^b, b^c in next round ldr r1,[sp,#2*4] @ from future BODY_16_xx #endif eor r0,r0,r9,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r4,r4,r8 @ d+=h eor r12,r12,r10 @ Maj(a,b,c) add r8,r8,r0,ror#2 @ h+=Sigma0(a) @ add r8,r8,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 4 # if 4==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r4,r4,ror#5 add r8,r8,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r4,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 4 add r8,r8,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 4==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r4,r4,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r4,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r7,r7,r2 @ h+=X[i] str r2,[sp,#4*4] eor r2,r5,r6 add r7,r7,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r4 add r7,r7,r12 @ h+=K256[i] eor r2,r2,r6 @ Ch(e,f,g) eor r0,r8,r8,ror#11 add r7,r7,r2 @ h+=Ch(e,f,g) #if 4==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 4<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r8,r9 @ a^b, b^c in next round #else ldr r2,[sp,#6*4] @ from future BODY_16_xx eor r12,r8,r9 @ a^b, b^c in next round ldr r1,[sp,#3*4] @ from future BODY_16_xx #endif eor r0,r0,r8,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r11,r11,r7 @ d+=h eor r3,r3,r9 @ Maj(a,b,c) add r7,r7,r0,ror#2 @ h+=Sigma0(a) @ add r7,r7,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 5 # if 5==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r11,r11,ror#5 add r7,r7,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r11,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 5 add r7,r7,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 5==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r11,r11,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r11,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r6,r6,r2 @ h+=X[i] str r2,[sp,#5*4] eor r2,r4,r5 add r6,r6,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r11 add r6,r6,r3 @ h+=K256[i] eor r2,r2,r5 @ Ch(e,f,g) eor r0,r7,r7,ror#11 add r6,r6,r2 @ h+=Ch(e,f,g) #if 5==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 5<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r7,r8 @ a^b, b^c in next round #else ldr r2,[sp,#7*4] @ from future BODY_16_xx eor r3,r7,r8 @ a^b, b^c in next round ldr r1,[sp,#4*4] @ from future BODY_16_xx #endif eor r0,r0,r7,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r10,r10,r6 @ d+=h eor r12,r12,r8 @ Maj(a,b,c) add r6,r6,r0,ror#2 @ h+=Sigma0(a) @ add r6,r6,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 6 # if 6==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r10,r10,ror#5 add r6,r6,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r10,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 6 add r6,r6,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 6==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r10,r10,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r10,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r5,r5,r2 @ h+=X[i] str r2,[sp,#6*4] eor r2,r11,r4 add r5,r5,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r10 add r5,r5,r12 @ h+=K256[i] eor r2,r2,r4 @ Ch(e,f,g) eor r0,r6,r6,ror#11 add r5,r5,r2 @ h+=Ch(e,f,g) #if 6==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 6<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r6,r7 @ a^b, b^c in next round #else ldr r2,[sp,#8*4] @ from future BODY_16_xx eor r12,r6,r7 @ a^b, b^c in next round ldr r1,[sp,#5*4] @ from future BODY_16_xx #endif eor r0,r0,r6,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r9,r9,r5 @ d+=h eor r3,r3,r7 @ Maj(a,b,c) add r5,r5,r0,ror#2 @ h+=Sigma0(a) @ add r5,r5,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 7 # if 7==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r9,r9,ror#5 add r5,r5,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r9,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 7 add r5,r5,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 7==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r9,r9,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r9,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r4,r4,r2 @ h+=X[i] str r2,[sp,#7*4] eor r2,r10,r11 add r4,r4,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r9 add r4,r4,r3 @ h+=K256[i] eor r2,r2,r11 @ Ch(e,f,g) eor r0,r5,r5,ror#11 add r4,r4,r2 @ h+=Ch(e,f,g) #if 7==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 7<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r5,r6 @ a^b, b^c in next round #else ldr r2,[sp,#9*4] @ from future BODY_16_xx eor r3,r5,r6 @ a^b, b^c in next round ldr r1,[sp,#6*4] @ from future BODY_16_xx #endif eor r0,r0,r5,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r8,r8,r4 @ d+=h eor r12,r12,r6 @ Maj(a,b,c) add r4,r4,r0,ror#2 @ h+=Sigma0(a) @ add r4,r4,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 8 # if 8==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r8,r8,ror#5 add r4,r4,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r8,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 8 add r4,r4,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 8==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r8,r8,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r8,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r11,r11,r2 @ h+=X[i] str r2,[sp,#8*4] eor r2,r9,r10 add r11,r11,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r8 add r11,r11,r12 @ h+=K256[i] eor r2,r2,r10 @ Ch(e,f,g) eor r0,r4,r4,ror#11 add r11,r11,r2 @ h+=Ch(e,f,g) #if 8==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 8<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r4,r5 @ a^b, b^c in next round #else ldr r2,[sp,#10*4] @ from future BODY_16_xx eor r12,r4,r5 @ a^b, b^c in next round ldr r1,[sp,#7*4] @ from future BODY_16_xx #endif eor r0,r0,r4,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r7,r7,r11 @ d+=h eor r3,r3,r5 @ Maj(a,b,c) add r11,r11,r0,ror#2 @ h+=Sigma0(a) @ add r11,r11,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 9 # if 9==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r7,r7,ror#5 add r11,r11,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r7,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 9 add r11,r11,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 9==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r7,r7,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r7,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r10,r10,r2 @ h+=X[i] str r2,[sp,#9*4] eor r2,r8,r9 add r10,r10,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r7 add r10,r10,r3 @ h+=K256[i] eor r2,r2,r9 @ Ch(e,f,g) eor r0,r11,r11,ror#11 add r10,r10,r2 @ h+=Ch(e,f,g) #if 9==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 9<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r11,r4 @ a^b, b^c in next round #else ldr r2,[sp,#11*4] @ from future BODY_16_xx eor r3,r11,r4 @ a^b, b^c in next round ldr r1,[sp,#8*4] @ from future BODY_16_xx #endif eor r0,r0,r11,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r6,r6,r10 @ d+=h eor r12,r12,r4 @ Maj(a,b,c) add r10,r10,r0,ror#2 @ h+=Sigma0(a) @ add r10,r10,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 10 # if 10==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r6,r6,ror#5 add r10,r10,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r6,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 10 add r10,r10,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 10==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r6,r6,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r6,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r9,r9,r2 @ h+=X[i] str r2,[sp,#10*4] eor r2,r7,r8 add r9,r9,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r6 add r9,r9,r12 @ h+=K256[i] eor r2,r2,r8 @ Ch(e,f,g) eor r0,r10,r10,ror#11 add r9,r9,r2 @ h+=Ch(e,f,g) #if 10==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 10<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r10,r11 @ a^b, b^c in next round #else ldr r2,[sp,#12*4] @ from future BODY_16_xx eor r12,r10,r11 @ a^b, b^c in next round ldr r1,[sp,#9*4] @ from future BODY_16_xx #endif eor r0,r0,r10,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r5,r5,r9 @ d+=h eor r3,r3,r11 @ Maj(a,b,c) add r9,r9,r0,ror#2 @ h+=Sigma0(a) @ add r9,r9,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 11 # if 11==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r5,r5,ror#5 add r9,r9,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r5,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 11 add r9,r9,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 11==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r5,r5,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r5,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r8,r8,r2 @ h+=X[i] str r2,[sp,#11*4] eor r2,r6,r7 add r8,r8,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r5 add r8,r8,r3 @ h+=K256[i] eor r2,r2,r7 @ Ch(e,f,g) eor r0,r9,r9,ror#11 add r8,r8,r2 @ h+=Ch(e,f,g) #if 11==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 11<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r9,r10 @ a^b, b^c in next round #else ldr r2,[sp,#13*4] @ from future BODY_16_xx eor r3,r9,r10 @ a^b, b^c in next round ldr r1,[sp,#10*4] @ from future BODY_16_xx #endif eor r0,r0,r9,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r4,r4,r8 @ d+=h eor r12,r12,r10 @ Maj(a,b,c) add r8,r8,r0,ror#2 @ h+=Sigma0(a) @ add r8,r8,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 12 # if 12==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r4,r4,ror#5 add r8,r8,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r4,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 12 add r8,r8,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 12==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r4,r4,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r4,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r7,r7,r2 @ h+=X[i] str r2,[sp,#12*4] eor r2,r5,r6 add r7,r7,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r4 add r7,r7,r12 @ h+=K256[i] eor r2,r2,r6 @ Ch(e,f,g) eor r0,r8,r8,ror#11 add r7,r7,r2 @ h+=Ch(e,f,g) #if 12==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 12<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r8,r9 @ a^b, b^c in next round #else ldr r2,[sp,#14*4] @ from future BODY_16_xx eor r12,r8,r9 @ a^b, b^c in next round ldr r1,[sp,#11*4] @ from future BODY_16_xx #endif eor r0,r0,r8,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r11,r11,r7 @ d+=h eor r3,r3,r9 @ Maj(a,b,c) add r7,r7,r0,ror#2 @ h+=Sigma0(a) @ add r7,r7,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 13 # if 13==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r11,r11,ror#5 add r7,r7,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r11,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 13 add r7,r7,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 13==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r11,r11,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r11,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r6,r6,r2 @ h+=X[i] str r2,[sp,#13*4] eor r2,r4,r5 add r6,r6,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r11 add r6,r6,r3 @ h+=K256[i] eor r2,r2,r5 @ Ch(e,f,g) eor r0,r7,r7,ror#11 add r6,r6,r2 @ h+=Ch(e,f,g) #if 13==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 13<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r7,r8 @ a^b, b^c in next round #else ldr r2,[sp,#15*4] @ from future BODY_16_xx eor r3,r7,r8 @ a^b, b^c in next round ldr r1,[sp,#12*4] @ from future BODY_16_xx #endif eor r0,r0,r7,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r10,r10,r6 @ d+=h eor r12,r12,r8 @ Maj(a,b,c) add r6,r6,r0,ror#2 @ h+=Sigma0(a) @ add r6,r6,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 14 # if 14==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r10,r10,ror#5 add r6,r6,r12 @ h+=Maj(a,b,c) from the past eor r0,r0,r10,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 14 add r6,r6,r12 @ h+=Maj(a,b,c) from the past ldrb r12,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r12,lsl#8 ldrb r12,[r1],#4 orr r2,r2,r0,lsl#16 # if 14==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r10,r10,ror#5 orr r2,r2,r12,lsl#24 eor r0,r0,r10,ror#19 @ Sigma1(e) #endif ldr r12,[r14],#4 @ *K256++ add r5,r5,r2 @ h+=X[i] str r2,[sp,#14*4] eor r2,r11,r4 add r5,r5,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r10 add r5,r5,r12 @ h+=K256[i] eor r2,r2,r4 @ Ch(e,f,g) eor r0,r6,r6,ror#11 add r5,r5,r2 @ h+=Ch(e,f,g) #if 14==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 14<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r6,r7 @ a^b, b^c in next round #else ldr r2,[sp,#0*4] @ from future BODY_16_xx eor r12,r6,r7 @ a^b, b^c in next round ldr r1,[sp,#13*4] @ from future BODY_16_xx #endif eor r0,r0,r6,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r9,r9,r5 @ d+=h eor r3,r3,r7 @ Maj(a,b,c) add r5,r5,r0,ror#2 @ h+=Sigma0(a) @ add r5,r5,r3 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 @ ldr r2,[r1],#4 @ 15 # if 15==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r9,r9,ror#5 add r5,r5,r3 @ h+=Maj(a,b,c) from the past eor r0,r0,r9,ror#19 @ Sigma1(e) # ifndef __ARMEB__ rev r2,r2 # endif #else @ ldrb r2,[r1,#3] @ 15 add r5,r5,r3 @ h+=Maj(a,b,c) from the past ldrb r3,[r1,#2] ldrb r0,[r1,#1] orr r2,r2,r3,lsl#8 ldrb r3,[r1],#4 orr r2,r2,r0,lsl#16 # if 15==15 str r1,[sp,#17*4] @ make room for r1 # endif eor r0,r9,r9,ror#5 orr r2,r2,r3,lsl#24 eor r0,r0,r9,ror#19 @ Sigma1(e) #endif ldr r3,[r14],#4 @ *K256++ add r4,r4,r2 @ h+=X[i] str r2,[sp,#15*4] eor r2,r10,r11 add r4,r4,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r9 add r4,r4,r3 @ h+=K256[i] eor r2,r2,r11 @ Ch(e,f,g) eor r0,r5,r5,ror#11 add r4,r4,r2 @ h+=Ch(e,f,g) #if 15==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 15<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r5,r6 @ a^b, b^c in next round #else ldr r2,[sp,#1*4] @ from future BODY_16_xx eor r3,r5,r6 @ a^b, b^c in next round ldr r1,[sp,#14*4] @ from future BODY_16_xx #endif eor r0,r0,r5,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r8,r8,r4 @ d+=h eor r12,r12,r6 @ Maj(a,b,c) add r4,r4,r0,ror#2 @ h+=Sigma0(a) @ add r4,r4,r12 @ h+=Maj(a,b,c) .Lrounds_16_xx: @ ldr r2,[sp,#1*4] @ 16 @ ldr r1,[sp,#14*4] mov r0,r2,ror#7 add r4,r4,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#0*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#9*4] add r12,r12,r0 eor r0,r8,r8,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r8,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r11,r11,r2 @ h+=X[i] str r2,[sp,#0*4] eor r2,r9,r10 add r11,r11,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r8 add r11,r11,r12 @ h+=K256[i] eor r2,r2,r10 @ Ch(e,f,g) eor r0,r4,r4,ror#11 add r11,r11,r2 @ h+=Ch(e,f,g) #if 16==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 16<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r4,r5 @ a^b, b^c in next round #else ldr r2,[sp,#2*4] @ from future BODY_16_xx eor r12,r4,r5 @ a^b, b^c in next round ldr r1,[sp,#15*4] @ from future BODY_16_xx #endif eor r0,r0,r4,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r7,r7,r11 @ d+=h eor r3,r3,r5 @ Maj(a,b,c) add r11,r11,r0,ror#2 @ h+=Sigma0(a) @ add r11,r11,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#2*4] @ 17 @ ldr r1,[sp,#15*4] mov r0,r2,ror#7 add r11,r11,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#1*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#10*4] add r3,r3,r0 eor r0,r7,r7,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r7,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r10,r10,r2 @ h+=X[i] str r2,[sp,#1*4] eor r2,r8,r9 add r10,r10,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r7 add r10,r10,r3 @ h+=K256[i] eor r2,r2,r9 @ Ch(e,f,g) eor r0,r11,r11,ror#11 add r10,r10,r2 @ h+=Ch(e,f,g) #if 17==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 17<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r11,r4 @ a^b, b^c in next round #else ldr r2,[sp,#3*4] @ from future BODY_16_xx eor r3,r11,r4 @ a^b, b^c in next round ldr r1,[sp,#0*4] @ from future BODY_16_xx #endif eor r0,r0,r11,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r6,r6,r10 @ d+=h eor r12,r12,r4 @ Maj(a,b,c) add r10,r10,r0,ror#2 @ h+=Sigma0(a) @ add r10,r10,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#3*4] @ 18 @ ldr r1,[sp,#0*4] mov r0,r2,ror#7 add r10,r10,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#2*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#11*4] add r12,r12,r0 eor r0,r6,r6,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r6,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r9,r9,r2 @ h+=X[i] str r2,[sp,#2*4] eor r2,r7,r8 add r9,r9,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r6 add r9,r9,r12 @ h+=K256[i] eor r2,r2,r8 @ Ch(e,f,g) eor r0,r10,r10,ror#11 add r9,r9,r2 @ h+=Ch(e,f,g) #if 18==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? 
#endif #if 18<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r10,r11 @ a^b, b^c in next round #else ldr r2,[sp,#4*4] @ from future BODY_16_xx eor r12,r10,r11 @ a^b, b^c in next round ldr r1,[sp,#1*4] @ from future BODY_16_xx #endif eor r0,r0,r10,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r5,r5,r9 @ d+=h eor r3,r3,r11 @ Maj(a,b,c) add r9,r9,r0,ror#2 @ h+=Sigma0(a) @ add r9,r9,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#4*4] @ 19 @ ldr r1,[sp,#1*4] mov r0,r2,ror#7 add r9,r9,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#3*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#12*4] add r3,r3,r0 eor r0,r5,r5,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r5,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r8,r8,r2 @ h+=X[i] str r2,[sp,#3*4] eor r2,r6,r7 add r8,r8,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r5 add r8,r8,r3 @ h+=K256[i] eor r2,r2,r7 @ Ch(e,f,g) eor r0,r9,r9,ror#11 add r8,r8,r2 @ h+=Ch(e,f,g) #if 19==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 19<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r9,r10 @ a^b, b^c in next round #else ldr r2,[sp,#5*4] @ from future BODY_16_xx eor r3,r9,r10 @ a^b, b^c in next round ldr r1,[sp,#2*4] @ from future BODY_16_xx #endif eor r0,r0,r9,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r4,r4,r8 @ d+=h eor r12,r12,r10 @ Maj(a,b,c) add r8,r8,r0,ror#2 @ h+=Sigma0(a) @ add r8,r8,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#5*4] @ 20 @ ldr r1,[sp,#2*4] mov r0,r2,ror#7 add r8,r8,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#4*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#13*4] add r12,r12,r0 eor r0,r4,r4,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r4,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r7,r7,r2 @ h+=X[i] str r2,[sp,#4*4] eor r2,r5,r6 add r7,r7,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r4 add r7,r7,r12 @ h+=K256[i] eor r2,r2,r6 @ Ch(e,f,g) eor r0,r8,r8,ror#11 add r7,r7,r2 @ h+=Ch(e,f,g) #if 20==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 20<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r8,r9 @ a^b, b^c in next round #else ldr r2,[sp,#6*4] @ from future BODY_16_xx eor r12,r8,r9 @ a^b, b^c in next round ldr r1,[sp,#3*4] @ from future BODY_16_xx #endif eor r0,r0,r8,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r11,r11,r7 @ d+=h eor r3,r3,r9 @ Maj(a,b,c) add r7,r7,r0,ror#2 @ h+=Sigma0(a) @ add r7,r7,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#6*4] @ 21 @ ldr r1,[sp,#3*4] mov r0,r2,ror#7 add r7,r7,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#5*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#14*4] add r3,r3,r0 eor r0,r11,r11,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r11,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r6,r6,r2 @ h+=X[i] str r2,[sp,#5*4] eor r2,r4,r5 add r6,r6,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r11 add r6,r6,r3 @ h+=K256[i] eor r2,r2,r5 @ Ch(e,f,g) eor r0,r7,r7,ror#11 add r6,r6,r2 @ h+=Ch(e,f,g) #if 21==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 21<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r7,r8 @ a^b, b^c in next round #else ldr r2,[sp,#7*4] @ from future BODY_16_xx eor r3,r7,r8 @ a^b, b^c in next round ldr r1,[sp,#4*4] @ from future BODY_16_xx #endif eor r0,r0,r7,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r10,r10,r6 @ d+=h eor r12,r12,r8 @ Maj(a,b,c) add r6,r6,r0,ror#2 @ h+=Sigma0(a) @ add r6,r6,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#7*4] @ 22 @ ldr r1,[sp,#4*4] mov r0,r2,ror#7 add r6,r6,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#6*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#15*4] add r12,r12,r0 eor r0,r10,r10,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r10,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r5,r5,r2 @ h+=X[i] str r2,[sp,#6*4] eor r2,r11,r4 add r5,r5,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r10 add r5,r5,r12 @ h+=K256[i] eor r2,r2,r4 @ Ch(e,f,g) eor r0,r6,r6,ror#11 add r5,r5,r2 @ h+=Ch(e,f,g) #if 22==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 22<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r6,r7 @ a^b, b^c in next round #else ldr r2,[sp,#8*4] @ from future BODY_16_xx eor r12,r6,r7 @ a^b, b^c in next round ldr r1,[sp,#5*4] @ from future BODY_16_xx #endif eor r0,r0,r6,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r9,r9,r5 @ d+=h eor r3,r3,r7 @ Maj(a,b,c) add r5,r5,r0,ror#2 @ h+=Sigma0(a) @ add r5,r5,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#8*4] @ 23 @ ldr r1,[sp,#5*4] mov r0,r2,ror#7 add r5,r5,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#7*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#0*4] add r3,r3,r0 eor r0,r9,r9,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r9,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r4,r4,r2 @ h+=X[i] str r2,[sp,#7*4] eor r2,r10,r11 add r4,r4,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r9 add r4,r4,r3 @ h+=K256[i] eor r2,r2,r11 @ Ch(e,f,g) eor r0,r5,r5,ror#11 add r4,r4,r2 @ h+=Ch(e,f,g) #if 23==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 23<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r5,r6 @ a^b, b^c in next round #else ldr r2,[sp,#9*4] @ from future BODY_16_xx eor r3,r5,r6 @ a^b, b^c in next round ldr r1,[sp,#6*4] @ from future BODY_16_xx #endif eor r0,r0,r5,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r8,r8,r4 @ d+=h eor r12,r12,r6 @ Maj(a,b,c) add r4,r4,r0,ror#2 @ h+=Sigma0(a) @ add r4,r4,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#9*4] @ 24 @ ldr r1,[sp,#6*4] mov r0,r2,ror#7 add r4,r4,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#8*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#1*4] add r12,r12,r0 eor r0,r8,r8,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r8,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r11,r11,r2 @ h+=X[i] str r2,[sp,#8*4] eor r2,r9,r10 add r11,r11,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r8 add r11,r11,r12 @ h+=K256[i] eor r2,r2,r10 @ Ch(e,f,g) eor r0,r4,r4,ror#11 add r11,r11,r2 @ h+=Ch(e,f,g) #if 24==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? 
#endif #if 24<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r4,r5 @ a^b, b^c in next round #else ldr r2,[sp,#10*4] @ from future BODY_16_xx eor r12,r4,r5 @ a^b, b^c in next round ldr r1,[sp,#7*4] @ from future BODY_16_xx #endif eor r0,r0,r4,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r7,r7,r11 @ d+=h eor r3,r3,r5 @ Maj(a,b,c) add r11,r11,r0,ror#2 @ h+=Sigma0(a) @ add r11,r11,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#10*4] @ 25 @ ldr r1,[sp,#7*4] mov r0,r2,ror#7 add r11,r11,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#9*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#2*4] add r3,r3,r0 eor r0,r7,r7,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r7,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r10,r10,r2 @ h+=X[i] str r2,[sp,#9*4] eor r2,r8,r9 add r10,r10,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r7 add r10,r10,r3 @ h+=K256[i] eor r2,r2,r9 @ Ch(e,f,g) eor r0,r11,r11,ror#11 add r10,r10,r2 @ h+=Ch(e,f,g) #if 25==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 25<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r11,r4 @ a^b, b^c in next round #else ldr r2,[sp,#11*4] @ from future BODY_16_xx eor r3,r11,r4 @ a^b, b^c in next round ldr r1,[sp,#8*4] @ from future BODY_16_xx #endif eor r0,r0,r11,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r6,r6,r10 @ d+=h eor r12,r12,r4 @ Maj(a,b,c) add r10,r10,r0,ror#2 @ h+=Sigma0(a) @ add r10,r10,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#11*4] @ 26 @ ldr r1,[sp,#8*4] mov r0,r2,ror#7 add r10,r10,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#10*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#3*4] add r12,r12,r0 eor r0,r6,r6,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r6,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r9,r9,r2 @ h+=X[i] str r2,[sp,#10*4] eor r2,r7,r8 add r9,r9,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r6 add r9,r9,r12 @ h+=K256[i] eor r2,r2,r8 @ Ch(e,f,g) eor r0,r10,r10,ror#11 add r9,r9,r2 @ h+=Ch(e,f,g) #if 26==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 26<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r10,r11 @ a^b, b^c in next round #else ldr r2,[sp,#12*4] @ from future BODY_16_xx eor r12,r10,r11 @ a^b, b^c in next round ldr r1,[sp,#9*4] @ from future BODY_16_xx #endif eor r0,r0,r10,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r5,r5,r9 @ d+=h eor r3,r3,r11 @ Maj(a,b,c) add r9,r9,r0,ror#2 @ h+=Sigma0(a) @ add r9,r9,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#12*4] @ 27 @ ldr r1,[sp,#9*4] mov r0,r2,ror#7 add r9,r9,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#11*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#4*4] add r3,r3,r0 eor r0,r5,r5,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r5,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r8,r8,r2 @ h+=X[i] str r2,[sp,#11*4] eor r2,r6,r7 add r8,r8,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r5 add r8,r8,r3 @ h+=K256[i] eor r2,r2,r7 @ Ch(e,f,g) eor r0,r9,r9,ror#11 add r8,r8,r2 @ h+=Ch(e,f,g) #if 27==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? 
#endif #if 27<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r9,r10 @ a^b, b^c in next round #else ldr r2,[sp,#13*4] @ from future BODY_16_xx eor r3,r9,r10 @ a^b, b^c in next round ldr r1,[sp,#10*4] @ from future BODY_16_xx #endif eor r0,r0,r9,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r4,r4,r8 @ d+=h eor r12,r12,r10 @ Maj(a,b,c) add r8,r8,r0,ror#2 @ h+=Sigma0(a) @ add r8,r8,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#13*4] @ 28 @ ldr r1,[sp,#10*4] mov r0,r2,ror#7 add r8,r8,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#12*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#5*4] add r12,r12,r0 eor r0,r4,r4,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r4,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r7,r7,r2 @ h+=X[i] str r2,[sp,#12*4] eor r2,r5,r6 add r7,r7,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r4 add r7,r7,r12 @ h+=K256[i] eor r2,r2,r6 @ Ch(e,f,g) eor r0,r8,r8,ror#11 add r7,r7,r2 @ h+=Ch(e,f,g) #if 28==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? #endif #if 28<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r8,r9 @ a^b, b^c in next round #else ldr r2,[sp,#14*4] @ from future BODY_16_xx eor r12,r8,r9 @ a^b, b^c in next round ldr r1,[sp,#11*4] @ from future BODY_16_xx #endif eor r0,r0,r8,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r11,r11,r7 @ d+=h eor r3,r3,r9 @ Maj(a,b,c) add r7,r7,r0,ror#2 @ h+=Sigma0(a) @ add r7,r7,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#14*4] @ 29 @ ldr r1,[sp,#11*4] mov r0,r2,ror#7 add r7,r7,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#13*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#6*4] add r3,r3,r0 eor r0,r11,r11,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r11,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r6,r6,r2 @ h+=X[i] str r2,[sp,#13*4] eor r2,r4,r5 add r6,r6,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r11 add r6,r6,r3 @ h+=K256[i] eor r2,r2,r5 @ Ch(e,f,g) eor r0,r7,r7,ror#11 add r6,r6,r2 @ h+=Ch(e,f,g) #if 29==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 29<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r7,r8 @ a^b, b^c in next round #else ldr r2,[sp,#15*4] @ from future BODY_16_xx eor r3,r7,r8 @ a^b, b^c in next round ldr r1,[sp,#12*4] @ from future BODY_16_xx #endif eor r0,r0,r7,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r10,r10,r6 @ d+=h eor r12,r12,r8 @ Maj(a,b,c) add r6,r6,r0,ror#2 @ h+=Sigma0(a) @ add r6,r6,r12 @ h+=Maj(a,b,c) @ ldr r2,[sp,#15*4] @ 30 @ ldr r1,[sp,#12*4] mov r0,r2,ror#7 add r6,r6,r12 @ h+=Maj(a,b,c) from the past mov r12,r1,ror#17 eor r0,r0,r2,ror#18 eor r12,r12,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#14*4] eor r12,r12,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#7*4] add r12,r12,r0 eor r0,r10,r10,ror#5 @ from BODY_00_15 add r2,r2,r12 eor r0,r0,r10,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r12,[r14],#4 @ *K256++ add r5,r5,r2 @ h+=X[i] str r2,[sp,#14*4] eor r2,r11,r4 add r5,r5,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r10 add r5,r5,r12 @ h+=K256[i] eor r2,r2,r4 @ Ch(e,f,g) eor r0,r6,r6,ror#11 add r5,r5,r2 @ h+=Ch(e,f,g) #if 30==31 and r12,r12,#0xff cmp r12,#0xf2 @ done? 
#endif #if 30<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r12,r6,r7 @ a^b, b^c in next round #else ldr r2,[sp,#0*4] @ from future BODY_16_xx eor r12,r6,r7 @ a^b, b^c in next round ldr r1,[sp,#13*4] @ from future BODY_16_xx #endif eor r0,r0,r6,ror#20 @ Sigma0(a) and r3,r3,r12 @ (b^c)&=(a^b) add r9,r9,r5 @ d+=h eor r3,r3,r7 @ Maj(a,b,c) add r5,r5,r0,ror#2 @ h+=Sigma0(a) @ add r5,r5,r3 @ h+=Maj(a,b,c) @ ldr r2,[sp,#0*4] @ 31 @ ldr r1,[sp,#13*4] mov r0,r2,ror#7 add r5,r5,r3 @ h+=Maj(a,b,c) from the past mov r3,r1,ror#17 eor r0,r0,r2,ror#18 eor r3,r3,r1,ror#19 eor r0,r0,r2,lsr#3 @ sigma0(X[i+1]) ldr r2,[sp,#15*4] eor r3,r3,r1,lsr#10 @ sigma1(X[i+14]) ldr r1,[sp,#8*4] add r3,r3,r0 eor r0,r9,r9,ror#5 @ from BODY_00_15 add r2,r2,r3 eor r0,r0,r9,ror#19 @ Sigma1(e) add r2,r2,r1 @ X[i] ldr r3,[r14],#4 @ *K256++ add r4,r4,r2 @ h+=X[i] str r2,[sp,#15*4] eor r2,r10,r11 add r4,r4,r0,ror#6 @ h+=Sigma1(e) and r2,r2,r9 add r4,r4,r3 @ h+=K256[i] eor r2,r2,r11 @ Ch(e,f,g) eor r0,r5,r5,ror#11 add r4,r4,r2 @ h+=Ch(e,f,g) #if 31==31 and r3,r3,#0xff cmp r3,#0xf2 @ done? #endif #if 31<15 # if __ARM_ARCH>=7 ldr r2,[r1],#4 @ prefetch # else ldrb r2,[r1,#3] # endif eor r3,r5,r6 @ a^b, b^c in next round #else ldr r2,[sp,#1*4] @ from future BODY_16_xx eor r3,r5,r6 @ a^b, b^c in next round ldr r1,[sp,#14*4] @ from future BODY_16_xx #endif eor r0,r0,r5,ror#20 @ Sigma0(a) and r12,r12,r3 @ (b^c)&=(a^b) add r8,r8,r4 @ d+=h eor r12,r12,r6 @ Maj(a,b,c) add r4,r4,r0,ror#2 @ h+=Sigma0(a) @ add r4,r4,r12 @ h+=Maj(a,b,c) #if __ARM_ARCH>=7 ite eq @ Thumb2 thing, sanity check in ARM #endif ldreq r3,[sp,#16*4] @ pull ctx bne .Lrounds_16_xx add r4,r4,r12 @ h+=Maj(a,b,c) from the past ldr r0,[r3,#0] ldr r2,[r3,#4] ldr r12,[r3,#8] add r4,r4,r0 ldr r0,[r3,#12] add r5,r5,r2 ldr r2,[r3,#16] add r6,r6,r12 ldr r12,[r3,#20] add r7,r7,r0 ldr r0,[r3,#24] add r8,r8,r2 ldr r2,[r3,#28] add r9,r9,r12 ldr r1,[sp,#17*4] @ pull inp ldr r12,[sp,#18*4] @ pull inp+len add r10,r10,r0 add r11,r11,r2 stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11} cmp r1,r12 sub r14,r14,#256 @ rewind Ktbl bne .Loop add sp,sp,#19*4 @ destroy frame #if __ARM_ARCH>=5 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,pc} #else ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,lr} tst lr,#1 moveq pc,lr @ be binary compatible with V4, yet .word 0xe12fff1e @ interoperable with Thumb ISA:-) #endif .size sha256_block_data_order_nohw,.-sha256_block_data_order_nohw #if __ARM_MAX_ARCH__>=7 .arch armv7-a .fpu neon .LK256_shortcut_neon: @ PC is 8 bytes ahead in Arm mode and 4 bytes ahead in Thumb mode. #if defined(__thumb2__) .word K256-(.LK256_add_neon+4) #else .word K256-(.LK256_add_neon+8) #endif .globl sha256_block_data_order_neon .hidden sha256_block_data_order_neon .type sha256_block_data_order_neon,%function .align 5 .skip 16 sha256_block_data_order_neon: stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} sub r11,sp,#16*4+16 @ K256 is just at the boundary of being easily referenced by an ADR from @ this function. In Arm mode, when building with __ARM_ARCH=6, it does @ not fit. By moving code around, we could make it fit, but this is too @ fragile. For simplicity, just load the offset from @ .LK256_shortcut_neon. @ @ TODO(davidben): adrl would avoid a load, but clang-assembler does not @ support it. We might be able to emulate it with a macro, but Android's @ did not work when I tried it. 
@ https://android.googlesource.com/platform/ndk/+/refs/heads/master/docs/ClangMigration.md#arm ldr r14,.LK256_shortcut_neon .LK256_add_neon: add r14,pc,r14 bic r11,r11,#15 @ align for 128-bit stores mov r12,sp mov sp,r11 @ alloca add r2,r1,r2,lsl#6 @ len to point at the end of inp vld1.8 {q0},[r1]! vld1.8 {q1},[r1]! vld1.8 {q2},[r1]! vld1.8 {q3},[r1]! vld1.32 {q8},[r14,:128]! vld1.32 {q9},[r14,:128]! vld1.32 {q10},[r14,:128]! vld1.32 {q11},[r14,:128]! vrev32.8 q0,q0 @ yes, even on str r0,[sp,#64] vrev32.8 q1,q1 @ big-endian str r1,[sp,#68] mov r1,sp vrev32.8 q2,q2 str r2,[sp,#72] vrev32.8 q3,q3 str r12,[sp,#76] @ save original sp vadd.i32 q8,q8,q0 vadd.i32 q9,q9,q1 vst1.32 {q8},[r1,:128]! vadd.i32 q10,q10,q2 vst1.32 {q9},[r1,:128]! vadd.i32 q11,q11,q3 vst1.32 {q10},[r1,:128]! vst1.32 {q11},[r1,:128]! ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} sub r1,r1,#64 ldr r2,[sp,#0] eor r12,r12,r12 eor r3,r5,r6 b .L_00_48 .align 4 .L_00_48: vext.8 q8,q0,q1,#4 add r11,r11,r2 eor r2,r9,r10 eor r0,r8,r8,ror#5 vext.8 q9,q2,q3,#4 add r4,r4,r12 and r2,r2,r8 eor r12,r0,r8,ror#19 vshr.u32 q10,q8,#7 eor r0,r4,r4,ror#11 eor r2,r2,r10 vadd.i32 q0,q0,q9 add r11,r11,r12,ror#6 eor r12,r4,r5 vshr.u32 q9,q8,#3 eor r0,r0,r4,ror#20 add r11,r11,r2 vsli.32 q10,q8,#25 ldr r2,[sp,#4] and r3,r3,r12 vshr.u32 q11,q8,#18 add r7,r7,r11 add r11,r11,r0,ror#2 eor r3,r3,r5 veor q9,q9,q10 add r10,r10,r2 vsli.32 q11,q8,#14 eor r2,r8,r9 eor r0,r7,r7,ror#5 vshr.u32 d24,d7,#17 add r11,r11,r3 and r2,r2,r7 veor q9,q9,q11 eor r3,r0,r7,ror#19 eor r0,r11,r11,ror#11 vsli.32 d24,d7,#15 eor r2,r2,r9 add r10,r10,r3,ror#6 vshr.u32 d25,d7,#10 eor r3,r11,r4 eor r0,r0,r11,ror#20 vadd.i32 q0,q0,q9 add r10,r10,r2 ldr r2,[sp,#8] veor d25,d25,d24 and r12,r12,r3 add r6,r6,r10 vshr.u32 d24,d7,#19 add r10,r10,r0,ror#2 eor r12,r12,r4 vsli.32 d24,d7,#13 add r9,r9,r2 eor r2,r7,r8 veor d25,d25,d24 eor r0,r6,r6,ror#5 add r10,r10,r12 vadd.i32 d0,d0,d25 and r2,r2,r6 eor r12,r0,r6,ror#19 vshr.u32 d24,d0,#17 eor r0,r10,r10,ror#11 eor r2,r2,r8 vsli.32 d24,d0,#15 add r9,r9,r12,ror#6 eor r12,r10,r11 vshr.u32 d25,d0,#10 eor r0,r0,r10,ror#20 add r9,r9,r2 veor d25,d25,d24 ldr r2,[sp,#12] and r3,r3,r12 vshr.u32 d24,d0,#19 add r5,r5,r9 add r9,r9,r0,ror#2 eor r3,r3,r11 vld1.32 {q8},[r14,:128]! add r8,r8,r2 vsli.32 d24,d0,#13 eor r2,r6,r7 eor r0,r5,r5,ror#5 veor d25,d25,d24 add r9,r9,r3 and r2,r2,r5 vadd.i32 d1,d1,d25 eor r3,r0,r5,ror#19 eor r0,r9,r9,ror#11 vadd.i32 q8,q8,q0 eor r2,r2,r7 add r8,r8,r3,ror#6 eor r3,r9,r10 eor r0,r0,r9,ror#20 add r8,r8,r2 ldr r2,[sp,#16] and r12,r12,r3 add r4,r4,r8 vst1.32 {q8},[r1,:128]! 
add r8,r8,r0,ror#2 eor r12,r12,r10 vext.8 q8,q1,q2,#4 add r7,r7,r2 eor r2,r5,r6 eor r0,r4,r4,ror#5 vext.8 q9,q3,q0,#4 add r8,r8,r12 and r2,r2,r4 eor r12,r0,r4,ror#19 vshr.u32 q10,q8,#7 eor r0,r8,r8,ror#11 eor r2,r2,r6 vadd.i32 q1,q1,q9 add r7,r7,r12,ror#6 eor r12,r8,r9 vshr.u32 q9,q8,#3 eor r0,r0,r8,ror#20 add r7,r7,r2 vsli.32 q10,q8,#25 ldr r2,[sp,#20] and r3,r3,r12 vshr.u32 q11,q8,#18 add r11,r11,r7 add r7,r7,r0,ror#2 eor r3,r3,r9 veor q9,q9,q10 add r6,r6,r2 vsli.32 q11,q8,#14 eor r2,r4,r5 eor r0,r11,r11,ror#5 vshr.u32 d24,d1,#17 add r7,r7,r3 and r2,r2,r11 veor q9,q9,q11 eor r3,r0,r11,ror#19 eor r0,r7,r7,ror#11 vsli.32 d24,d1,#15 eor r2,r2,r5 add r6,r6,r3,ror#6 vshr.u32 d25,d1,#10 eor r3,r7,r8 eor r0,r0,r7,ror#20 vadd.i32 q1,q1,q9 add r6,r6,r2 ldr r2,[sp,#24] veor d25,d25,d24 and r12,r12,r3 add r10,r10,r6 vshr.u32 d24,d1,#19 add r6,r6,r0,ror#2 eor r12,r12,r8 vsli.32 d24,d1,#13 add r5,r5,r2 eor r2,r11,r4 veor d25,d25,d24 eor r0,r10,r10,ror#5 add r6,r6,r12 vadd.i32 d2,d2,d25 and r2,r2,r10 eor r12,r0,r10,ror#19 vshr.u32 d24,d2,#17 eor r0,r6,r6,ror#11 eor r2,r2,r4 vsli.32 d24,d2,#15 add r5,r5,r12,ror#6 eor r12,r6,r7 vshr.u32 d25,d2,#10 eor r0,r0,r6,ror#20 add r5,r5,r2 veor d25,d25,d24 ldr r2,[sp,#28] and r3,r3,r12 vshr.u32 d24,d2,#19 add r9,r9,r5 add r5,r5,r0,ror#2 eor r3,r3,r7 vld1.32 {q8},[r14,:128]! add r4,r4,r2 vsli.32 d24,d2,#13 eor r2,r10,r11 eor r0,r9,r9,ror#5 veor d25,d25,d24 add r5,r5,r3 and r2,r2,r9 vadd.i32 d3,d3,d25 eor r3,r0,r9,ror#19 eor r0,r5,r5,ror#11 vadd.i32 q8,q8,q1 eor r2,r2,r11 add r4,r4,r3,ror#6 eor r3,r5,r6 eor r0,r0,r5,ror#20 add r4,r4,r2 ldr r2,[sp,#32] and r12,r12,r3 add r8,r8,r4 vst1.32 {q8},[r1,:128]! add r4,r4,r0,ror#2 eor r12,r12,r6 vext.8 q8,q2,q3,#4 add r11,r11,r2 eor r2,r9,r10 eor r0,r8,r8,ror#5 vext.8 q9,q0,q1,#4 add r4,r4,r12 and r2,r2,r8 eor r12,r0,r8,ror#19 vshr.u32 q10,q8,#7 eor r0,r4,r4,ror#11 eor r2,r2,r10 vadd.i32 q2,q2,q9 add r11,r11,r12,ror#6 eor r12,r4,r5 vshr.u32 q9,q8,#3 eor r0,r0,r4,ror#20 add r11,r11,r2 vsli.32 q10,q8,#25 ldr r2,[sp,#36] and r3,r3,r12 vshr.u32 q11,q8,#18 add r7,r7,r11 add r11,r11,r0,ror#2 eor r3,r3,r5 veor q9,q9,q10 add r10,r10,r2 vsli.32 q11,q8,#14 eor r2,r8,r9 eor r0,r7,r7,ror#5 vshr.u32 d24,d3,#17 add r11,r11,r3 and r2,r2,r7 veor q9,q9,q11 eor r3,r0,r7,ror#19 eor r0,r11,r11,ror#11 vsli.32 d24,d3,#15 eor r2,r2,r9 add r10,r10,r3,ror#6 vshr.u32 d25,d3,#10 eor r3,r11,r4 eor r0,r0,r11,ror#20 vadd.i32 q2,q2,q9 add r10,r10,r2 ldr r2,[sp,#40] veor d25,d25,d24 and r12,r12,r3 add r6,r6,r10 vshr.u32 d24,d3,#19 add r10,r10,r0,ror#2 eor r12,r12,r4 vsli.32 d24,d3,#13 add r9,r9,r2 eor r2,r7,r8 veor d25,d25,d24 eor r0,r6,r6,ror#5 add r10,r10,r12 vadd.i32 d4,d4,d25 and r2,r2,r6 eor r12,r0,r6,ror#19 vshr.u32 d24,d4,#17 eor r0,r10,r10,ror#11 eor r2,r2,r8 vsli.32 d24,d4,#15 add r9,r9,r12,ror#6 eor r12,r10,r11 vshr.u32 d25,d4,#10 eor r0,r0,r10,ror#20 add r9,r9,r2 veor d25,d25,d24 ldr r2,[sp,#44] and r3,r3,r12 vshr.u32 d24,d4,#19 add r5,r5,r9 add r9,r9,r0,ror#2 eor r3,r3,r11 vld1.32 {q8},[r14,:128]! add r8,r8,r2 vsli.32 d24,d4,#13 eor r2,r6,r7 eor r0,r5,r5,ror#5 veor d25,d25,d24 add r9,r9,r3 and r2,r2,r5 vadd.i32 d5,d5,d25 eor r3,r0,r5,ror#19 eor r0,r9,r9,ror#11 vadd.i32 q8,q8,q2 eor r2,r2,r7 add r8,r8,r3,ror#6 eor r3,r9,r10 eor r0,r0,r9,ror#20 add r8,r8,r2 ldr r2,[sp,#48] and r12,r12,r3 add r4,r4,r8 vst1.32 {q8},[r1,:128]! 
add r8,r8,r0,ror#2 eor r12,r12,r10 vext.8 q8,q3,q0,#4 add r7,r7,r2 eor r2,r5,r6 eor r0,r4,r4,ror#5 vext.8 q9,q1,q2,#4 add r8,r8,r12 and r2,r2,r4 eor r12,r0,r4,ror#19 vshr.u32 q10,q8,#7 eor r0,r8,r8,ror#11 eor r2,r2,r6 vadd.i32 q3,q3,q9 add r7,r7,r12,ror#6 eor r12,r8,r9 vshr.u32 q9,q8,#3 eor r0,r0,r8,ror#20 add r7,r7,r2 vsli.32 q10,q8,#25 ldr r2,[sp,#52] and r3,r3,r12 vshr.u32 q11,q8,#18 add r11,r11,r7 add r7,r7,r0,ror#2 eor r3,r3,r9 veor q9,q9,q10 add r6,r6,r2 vsli.32 q11,q8,#14 eor r2,r4,r5 eor r0,r11,r11,ror#5 vshr.u32 d24,d5,#17 add r7,r7,r3 and r2,r2,r11 veor q9,q9,q11 eor r3,r0,r11,ror#19 eor r0,r7,r7,ror#11 vsli.32 d24,d5,#15 eor r2,r2,r5 add r6,r6,r3,ror#6 vshr.u32 d25,d5,#10 eor r3,r7,r8 eor r0,r0,r7,ror#20 vadd.i32 q3,q3,q9 add r6,r6,r2 ldr r2,[sp,#56] veor d25,d25,d24 and r12,r12,r3 add r10,r10,r6 vshr.u32 d24,d5,#19 add r6,r6,r0,ror#2 eor r12,r12,r8 vsli.32 d24,d5,#13 add r5,r5,r2 eor r2,r11,r4 veor d25,d25,d24 eor r0,r10,r10,ror#5 add r6,r6,r12 vadd.i32 d6,d6,d25 and r2,r2,r10 eor r12,r0,r10,ror#19 vshr.u32 d24,d6,#17 eor r0,r6,r6,ror#11 eor r2,r2,r4 vsli.32 d24,d6,#15 add r5,r5,r12,ror#6 eor r12,r6,r7 vshr.u32 d25,d6,#10 eor r0,r0,r6,ror#20 add r5,r5,r2 veor d25,d25,d24 ldr r2,[sp,#60] and r3,r3,r12 vshr.u32 d24,d6,#19 add r9,r9,r5 add r5,r5,r0,ror#2 eor r3,r3,r7 vld1.32 {q8},[r14,:128]! add r4,r4,r2 vsli.32 d24,d6,#13 eor r2,r10,r11 eor r0,r9,r9,ror#5 veor d25,d25,d24 add r5,r5,r3 and r2,r2,r9 vadd.i32 d7,d7,d25 eor r3,r0,r9,ror#19 eor r0,r5,r5,ror#11 vadd.i32 q8,q8,q3 eor r2,r2,r11 add r4,r4,r3,ror#6 eor r3,r5,r6 eor r0,r0,r5,ror#20 add r4,r4,r2 ldr r2,[r14] and r12,r12,r3 add r8,r8,r4 vst1.32 {q8},[r1,:128]! add r4,r4,r0,ror#2 eor r12,r12,r6 teq r2,#0 @ check for K256 terminator ldr r2,[sp,#0] sub r1,r1,#64 bne .L_00_48 ldr r1,[sp,#68] ldr r0,[sp,#72] sub r14,r14,#256 @ rewind r14 teq r1,r0 it eq subeq r1,r1,#64 @ avoid SEGV vld1.8 {q0},[r1]! @ load next input block vld1.8 {q1},[r1]! vld1.8 {q2},[r1]! vld1.8 {q3},[r1]! it ne strne r1,[sp,#68] mov r1,sp add r11,r11,r2 eor r2,r9,r10 eor r0,r8,r8,ror#5 add r4,r4,r12 vld1.32 {q8},[r14,:128]! and r2,r2,r8 eor r12,r0,r8,ror#19 eor r0,r4,r4,ror#11 eor r2,r2,r10 vrev32.8 q0,q0 add r11,r11,r12,ror#6 eor r12,r4,r5 eor r0,r0,r4,ror#20 add r11,r11,r2 vadd.i32 q8,q8,q0 ldr r2,[sp,#4] and r3,r3,r12 add r7,r7,r11 add r11,r11,r0,ror#2 eor r3,r3,r5 add r10,r10,r2 eor r2,r8,r9 eor r0,r7,r7,ror#5 add r11,r11,r3 and r2,r2,r7 eor r3,r0,r7,ror#19 eor r0,r11,r11,ror#11 eor r2,r2,r9 add r10,r10,r3,ror#6 eor r3,r11,r4 eor r0,r0,r11,ror#20 add r10,r10,r2 ldr r2,[sp,#8] and r12,r12,r3 add r6,r6,r10 add r10,r10,r0,ror#2 eor r12,r12,r4 add r9,r9,r2 eor r2,r7,r8 eor r0,r6,r6,ror#5 add r10,r10,r12 and r2,r2,r6 eor r12,r0,r6,ror#19 eor r0,r10,r10,ror#11 eor r2,r2,r8 add r9,r9,r12,ror#6 eor r12,r10,r11 eor r0,r0,r10,ror#20 add r9,r9,r2 ldr r2,[sp,#12] and r3,r3,r12 add r5,r5,r9 add r9,r9,r0,ror#2 eor r3,r3,r11 add r8,r8,r2 eor r2,r6,r7 eor r0,r5,r5,ror#5 add r9,r9,r3 and r2,r2,r5 eor r3,r0,r5,ror#19 eor r0,r9,r9,ror#11 eor r2,r2,r7 add r8,r8,r3,ror#6 eor r3,r9,r10 eor r0,r0,r9,ror#20 add r8,r8,r2 ldr r2,[sp,#16] and r12,r12,r3 add r4,r4,r8 add r8,r8,r0,ror#2 eor r12,r12,r10 vst1.32 {q8},[r1,:128]! add r7,r7,r2 eor r2,r5,r6 eor r0,r4,r4,ror#5 add r8,r8,r12 vld1.32 {q8},[r14,:128]! 
and r2,r2,r4 eor r12,r0,r4,ror#19 eor r0,r8,r8,ror#11 eor r2,r2,r6 vrev32.8 q1,q1 add r7,r7,r12,ror#6 eor r12,r8,r9 eor r0,r0,r8,ror#20 add r7,r7,r2 vadd.i32 q8,q8,q1 ldr r2,[sp,#20] and r3,r3,r12 add r11,r11,r7 add r7,r7,r0,ror#2 eor r3,r3,r9 add r6,r6,r2 eor r2,r4,r5 eor r0,r11,r11,ror#5 add r7,r7,r3 and r2,r2,r11 eor r3,r0,r11,ror#19 eor r0,r7,r7,ror#11 eor r2,r2,r5 add r6,r6,r3,ror#6 eor r3,r7,r8 eor r0,r0,r7,ror#20 add r6,r6,r2 ldr r2,[sp,#24] and r12,r12,r3 add r10,r10,r6 add r6,r6,r0,ror#2 eor r12,r12,r8 add r5,r5,r2 eor r2,r11,r4 eor r0,r10,r10,ror#5 add r6,r6,r12 and r2,r2,r10 eor r12,r0,r10,ror#19 eor r0,r6,r6,ror#11 eor r2,r2,r4 add r5,r5,r12,ror#6 eor r12,r6,r7 eor r0,r0,r6,ror#20 add r5,r5,r2 ldr r2,[sp,#28] and r3,r3,r12 add r9,r9,r5 add r5,r5,r0,ror#2 eor r3,r3,r7 add r4,r4,r2 eor r2,r10,r11 eor r0,r9,r9,ror#5 add r5,r5,r3 and r2,r2,r9 eor r3,r0,r9,ror#19 eor r0,r5,r5,ror#11 eor r2,r2,r11 add r4,r4,r3,ror#6 eor r3,r5,r6 eor r0,r0,r5,ror#20 add r4,r4,r2 ldr r2,[sp,#32] and r12,r12,r3 add r8,r8,r4 add r4,r4,r0,ror#2 eor r12,r12,r6 vst1.32 {q8},[r1,:128]! add r11,r11,r2 eor r2,r9,r10 eor r0,r8,r8,ror#5 add r4,r4,r12 vld1.32 {q8},[r14,:128]! and r2,r2,r8 eor r12,r0,r8,ror#19 eor r0,r4,r4,ror#11 eor r2,r2,r10 vrev32.8 q2,q2 add r11,r11,r12,ror#6 eor r12,r4,r5 eor r0,r0,r4,ror#20 add r11,r11,r2 vadd.i32 q8,q8,q2 ldr r2,[sp,#36] and r3,r3,r12 add r7,r7,r11 add r11,r11,r0,ror#2 eor r3,r3,r5 add r10,r10,r2 eor r2,r8,r9 eor r0,r7,r7,ror#5 add r11,r11,r3 and r2,r2,r7 eor r3,r0,r7,ror#19 eor r0,r11,r11,ror#11 eor r2,r2,r9 add r10,r10,r3,ror#6 eor r3,r11,r4 eor r0,r0,r11,ror#20 add r10,r10,r2 ldr r2,[sp,#40] and r12,r12,r3 add r6,r6,r10 add r10,r10,r0,ror#2 eor r12,r12,r4 add r9,r9,r2 eor r2,r7,r8 eor r0,r6,r6,ror#5 add r10,r10,r12 and r2,r2,r6 eor r12,r0,r6,ror#19 eor r0,r10,r10,ror#11 eor r2,r2,r8 add r9,r9,r12,ror#6 eor r12,r10,r11 eor r0,r0,r10,ror#20 add r9,r9,r2 ldr r2,[sp,#44] and r3,r3,r12 add r5,r5,r9 add r9,r9,r0,ror#2 eor r3,r3,r11 add r8,r8,r2 eor r2,r6,r7 eor r0,r5,r5,ror#5 add r9,r9,r3 and r2,r2,r5 eor r3,r0,r5,ror#19 eor r0,r9,r9,ror#11 eor r2,r2,r7 add r8,r8,r3,ror#6 eor r3,r9,r10 eor r0,r0,r9,ror#20 add r8,r8,r2 ldr r2,[sp,#48] and r12,r12,r3 add r4,r4,r8 add r8,r8,r0,ror#2 eor r12,r12,r10 vst1.32 {q8},[r1,:128]! add r7,r7,r2 eor r2,r5,r6 eor r0,r4,r4,ror#5 add r8,r8,r12 vld1.32 {q8},[r14,:128]! and r2,r2,r4 eor r12,r0,r4,ror#19 eor r0,r8,r8,ror#11 eor r2,r2,r6 vrev32.8 q3,q3 add r7,r7,r12,ror#6 eor r12,r8,r9 eor r0,r0,r8,ror#20 add r7,r7,r2 vadd.i32 q8,q8,q3 ldr r2,[sp,#52] and r3,r3,r12 add r11,r11,r7 add r7,r7,r0,ror#2 eor r3,r3,r9 add r6,r6,r2 eor r2,r4,r5 eor r0,r11,r11,ror#5 add r7,r7,r3 and r2,r2,r11 eor r3,r0,r11,ror#19 eor r0,r7,r7,ror#11 eor r2,r2,r5 add r6,r6,r3,ror#6 eor r3,r7,r8 eor r0,r0,r7,ror#20 add r6,r6,r2 ldr r2,[sp,#56] and r12,r12,r3 add r10,r10,r6 add r6,r6,r0,ror#2 eor r12,r12,r8 add r5,r5,r2 eor r2,r11,r4 eor r0,r10,r10,ror#5 add r6,r6,r12 and r2,r2,r10 eor r12,r0,r10,ror#19 eor r0,r6,r6,ror#11 eor r2,r2,r4 add r5,r5,r12,ror#6 eor r12,r6,r7 eor r0,r0,r6,ror#20 add r5,r5,r2 ldr r2,[sp,#60] and r3,r3,r12 add r9,r9,r5 add r5,r5,r0,ror#2 eor r3,r3,r7 add r4,r4,r2 eor r2,r10,r11 eor r0,r9,r9,ror#5 add r5,r5,r3 and r2,r2,r9 eor r3,r0,r9,ror#19 eor r0,r5,r5,ror#11 eor r2,r2,r11 add r4,r4,r3,ror#6 eor r3,r5,r6 eor r0,r0,r5,ror#20 add r4,r4,r2 ldr r2,[sp,#64] and r12,r12,r3 add r8,r8,r4 add r4,r4,r0,ror#2 eor r12,r12,r6 vst1.32 {q8},[r1,:128]! 
ldr r0,[r2,#0] add r4,r4,r12 @ h+=Maj(a,b,c) from the past ldr r12,[r2,#4] ldr r3,[r2,#8] ldr r1,[r2,#12] add r4,r4,r0 @ accumulate ldr r0,[r2,#16] add r5,r5,r12 ldr r12,[r2,#20] add r6,r6,r3 ldr r3,[r2,#24] add r7,r7,r1 ldr r1,[r2,#28] add r8,r8,r0 str r4,[r2],#4 add r9,r9,r12 str r5,[r2],#4 add r10,r10,r3 str r6,[r2],#4 add r11,r11,r1 str r7,[r2],#4 stmia r2,{r8,r9,r10,r11} ittte ne movne r1,sp ldrne r2,[sp,#0] eorne r12,r12,r12 ldreq sp,[sp,#76] @ restore original sp itt ne eorne r3,r5,r6 bne .L_00_48 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} .size sha256_block_data_order_neon,.-sha256_block_data_order_neon #endif #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) # if defined(__thumb2__) # define INST(a,b,c,d) .byte c,d|0xc,a,b # else # define INST(a,b,c,d) .byte a,b,c,d # endif .LK256_shortcut_hw: @ PC is 8 bytes ahead in Arm mode and 4 bytes ahead in Thumb mode. #if defined(__thumb2__) .word K256-(.LK256_add_hw+4) #else .word K256-(.LK256_add_hw+8) #endif .globl sha256_block_data_order_hw .hidden sha256_block_data_order_hw .type sha256_block_data_order_hw,%function .align 5 sha256_block_data_order_hw: @ K256 is too far to reference from one ADR command in Thumb mode. In @ Arm mode, we could make it fit by aligning the ADR offset to a 64-byte @ boundary. For simplicity, just load the offset from .LK256_shortcut_hw. ldr r3,.LK256_shortcut_hw .LK256_add_hw: add r3,pc,r3 vld1.32 {q0,q1},[r0] add r2,r1,r2,lsl#6 @ len to point at the end of inp b .Loop_v8 .align 4 .Loop_v8: vld1.8 {q8,q9},[r1]! vld1.8 {q10,q11},[r1]! vld1.32 {q12},[r3]! vrev32.8 q8,q8 vrev32.8 q9,q9 vrev32.8 q10,q10 vrev32.8 q11,q11 vmov q14,q0 @ offload vmov q15,q1 teq r1,r2 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q8 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q9 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q10 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q11 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q8 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q9 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q10 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 vld1.32 {q12},[r3]! 
vadd.i32 q13,q13,q11 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q8 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q9 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q10 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q11 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 vld1.32 {q13},[r3]! vadd.i32 q12,q12,q8 vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 vld1.32 {q12},[r3]! vadd.i32 q13,q13,q9 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 vld1.32 {q13},[r3] vadd.i32 q12,q12,q10 sub r3,r3,#256-16 @ rewind vmov q2,q0 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 vadd.i32 q13,q13,q11 vmov q2,q0 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 vadd.i32 q0,q0,q14 vadd.i32 q1,q1,q15 it ne bne .Loop_v8 vst1.32 {q0,q1},[r0] bx lr @ bx lr .size sha256_block_data_order_hw,.-sha256_block_data_order_hw #endif .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,47,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
42,817
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/fipsmodule/sha512-armv4.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__) @ Copyright 2007-2016 The OpenSSL Project Authors. All Rights Reserved. @ @ Licensed under the OpenSSL license (the "License"). You may not use @ this file except in compliance with the License. You can obtain a copy @ in the file LICENSE in the source distribution or at @ https://www.openssl.org/source/license.html @ ==================================================================== @ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL @ project. The module is, however, dual licensed under OpenSSL and @ CRYPTOGAMS licenses depending on where you obtain it. For further @ details see http://www.openssl.org/~appro/cryptogams/. @ @ Permission to use under GPL terms is granted. @ ==================================================================== @ SHA512 block procedure for ARMv4. September 2007. @ This code is ~4.5 (four and a half) times faster than code generated @ by gcc 3.4 and it spends ~72 clock cycles per byte [on single-issue @ Xscale PXA250 core]. @ @ July 2010. @ @ Rescheduling for dual-issue pipeline resulted in 6% improvement on @ Cortex A8 core and ~40 cycles per processed byte. @ February 2011. @ @ Profiler-assisted and platform-specific optimization resulted in 7% @ improvement on Cortex A8 core and ~38 cycles per byte. @ March 2011. @ @ Add NEON implementation. On Cortex A8 it was measured to process @ one byte in 23.3 cycles or ~60% faster than integer-only code. @ August 2012. @ @ Improve NEON performance by 12% on Snapdragon S4. In absolute @ terms it's 22.6 cycles per byte, which is a disappointing result. @ Technical writers asserted that 3-way S4 pipeline can sustain @ multiple NEON instructions per cycle, but dual NEON issue could @ not be observed, see http://www.openssl.org/~appro/Snapdragon-S4.html @ for further details. On a side note, Cortex-A15 processes one byte in @ 16 cycles. @ Byte order [in]dependence. ========================================= @ @ Originally caller was expected to maintain specific *dword* order in @ h[0-7], namely with most significant dword at *lower* address, which @ was reflected in below two parameters as 0 and 4. Now caller is @ expected to maintain native byte order for whole 64-bit values. #ifndef __KERNEL__ # include <openssl/arm_arch.h> # define VFP_ABI_PUSH vstmdb sp!,{d8-d15} # define VFP_ABI_POP vldmia sp!,{d8-d15} #else # define __ARM_MAX_ARCH__ 7 # define VFP_ABI_PUSH # define VFP_ABI_POP #endif @ Silence ARMv8 deprecated IT instruction warnings. This file is used by both @ ARMv7 and ARMv8 processors and does not use ARMv8 instructions.
.arch armv7-a #ifdef __ARMEL__ # define LO 0 # define HI 4 # define WORD64(hi0,lo0,hi1,lo1) .word lo0,hi0, lo1,hi1 #else # define HI 0 # define LO 4 # define WORD64(hi0,lo0,hi1,lo1) .word hi0,lo0, hi1,lo1 #endif .text #if defined(__thumb2__) .syntax unified .thumb # define adrl adr #else .code 32 #endif .type K512,%object .align 5 K512: WORD64(0x428a2f98,0xd728ae22, 0x71374491,0x23ef65cd) WORD64(0xb5c0fbcf,0xec4d3b2f, 0xe9b5dba5,0x8189dbbc) WORD64(0x3956c25b,0xf348b538, 0x59f111f1,0xb605d019) WORD64(0x923f82a4,0xaf194f9b, 0xab1c5ed5,0xda6d8118) WORD64(0xd807aa98,0xa3030242, 0x12835b01,0x45706fbe) WORD64(0x243185be,0x4ee4b28c, 0x550c7dc3,0xd5ffb4e2) WORD64(0x72be5d74,0xf27b896f, 0x80deb1fe,0x3b1696b1) WORD64(0x9bdc06a7,0x25c71235, 0xc19bf174,0xcf692694) WORD64(0xe49b69c1,0x9ef14ad2, 0xefbe4786,0x384f25e3) WORD64(0x0fc19dc6,0x8b8cd5b5, 0x240ca1cc,0x77ac9c65) WORD64(0x2de92c6f,0x592b0275, 0x4a7484aa,0x6ea6e483) WORD64(0x5cb0a9dc,0xbd41fbd4, 0x76f988da,0x831153b5) WORD64(0x983e5152,0xee66dfab, 0xa831c66d,0x2db43210) WORD64(0xb00327c8,0x98fb213f, 0xbf597fc7,0xbeef0ee4) WORD64(0xc6e00bf3,0x3da88fc2, 0xd5a79147,0x930aa725) WORD64(0x06ca6351,0xe003826f, 0x14292967,0x0a0e6e70) WORD64(0x27b70a85,0x46d22ffc, 0x2e1b2138,0x5c26c926) WORD64(0x4d2c6dfc,0x5ac42aed, 0x53380d13,0x9d95b3df) WORD64(0x650a7354,0x8baf63de, 0x766a0abb,0x3c77b2a8) WORD64(0x81c2c92e,0x47edaee6, 0x92722c85,0x1482353b) WORD64(0xa2bfe8a1,0x4cf10364, 0xa81a664b,0xbc423001) WORD64(0xc24b8b70,0xd0f89791, 0xc76c51a3,0x0654be30) WORD64(0xd192e819,0xd6ef5218, 0xd6990624,0x5565a910) WORD64(0xf40e3585,0x5771202a, 0x106aa070,0x32bbd1b8) WORD64(0x19a4c116,0xb8d2d0c8, 0x1e376c08,0x5141ab53) WORD64(0x2748774c,0xdf8eeb99, 0x34b0bcb5,0xe19b48a8) WORD64(0x391c0cb3,0xc5c95a63, 0x4ed8aa4a,0xe3418acb) WORD64(0x5b9cca4f,0x7763e373, 0x682e6ff3,0xd6b2b8a3) WORD64(0x748f82ee,0x5defb2fc, 0x78a5636f,0x43172f60) WORD64(0x84c87814,0xa1f0ab72, 0x8cc70208,0x1a6439ec) WORD64(0x90befffa,0x23631e28, 0xa4506ceb,0xde82bde9) WORD64(0xbef9a3f7,0xb2c67915, 0xc67178f2,0xe372532b) WORD64(0xca273ece,0xea26619c, 0xd186b8c7,0x21c0c207) WORD64(0xeada7dd6,0xcde0eb1e, 0xf57d4f7f,0xee6ed178) WORD64(0x06f067aa,0x72176fba, 0x0a637dc5,0xa2c898a6) WORD64(0x113f9804,0xbef90dae, 0x1b710b35,0x131c471b) WORD64(0x28db77f5,0x23047d84, 0x32caab7b,0x40c72493) WORD64(0x3c9ebe0a,0x15c9bebc, 0x431d67c4,0x9c100d4c) WORD64(0x4cc5d4be,0xcb3e42b6, 0x597f299c,0xfc657e2a) WORD64(0x5fcb6fab,0x3ad6faec, 0x6c44198c,0x4a475817) .size K512,.-K512 .globl sha512_block_data_order_nohw .hidden sha512_block_data_order_nohw .type sha512_block_data_order_nohw,%function sha512_block_data_order_nohw: add r2,r1,r2,lsl#7 @ len to point at the end of inp stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} adr r14,K512 sub sp,sp,#9*8 ldr r7,[r0,#32+LO] ldr r8,[r0,#32+HI] ldr r9, [r0,#48+LO] ldr r10, [r0,#48+HI] ldr r11, [r0,#56+LO] ldr r12, [r0,#56+HI] .Loop: str r9, [sp,#48+0] str r10, [sp,#48+4] str r11, [sp,#56+0] str r12, [sp,#56+4] ldr r5,[r0,#0+LO] ldr r6,[r0,#0+HI] ldr r3,[r0,#8+LO] ldr r4,[r0,#8+HI] ldr r9, [r0,#16+LO] ldr r10, [r0,#16+HI] ldr r11, [r0,#24+LO] ldr r12, [r0,#24+HI] str r3,[sp,#8+0] str r4,[sp,#8+4] str r9, [sp,#16+0] str r10, [sp,#16+4] str r11, [sp,#24+0] str r12, [sp,#24+4] ldr r3,[r0,#40+LO] ldr r4,[r0,#40+HI] str r3,[sp,#40+0] str r4,[sp,#40+4] .L00_15: #if __ARM_ARCH<7 ldrb r3,[r1,#7] ldrb r9, [r1,#6] ldrb r10, [r1,#5] ldrb r11, [r1,#4] ldrb r4,[r1,#3] ldrb r12, [r1,#2] orr r3,r3,r9,lsl#8 ldrb r9, [r1,#1] orr r3,r3,r10,lsl#16 ldrb r10, [r1],#8 orr r3,r3,r11,lsl#24 orr r4,r4,r12,lsl#8 orr 
r4,r4,r9,lsl#16 orr r4,r4,r10,lsl#24 #else ldr r3,[r1,#4] ldr r4,[r1],#8 #ifdef __ARMEL__ rev r3,r3 rev r4,r4 #endif #endif @ Sigma1(x) (ROTR((x),14) ^ ROTR((x),18) ^ ROTR((x),41)) @ LO lo>>14^hi<<18 ^ lo>>18^hi<<14 ^ hi>>9^lo<<23 @ HI hi>>14^lo<<18 ^ hi>>18^lo<<14 ^ lo>>9^hi<<23 mov r9,r7,lsr#14 str r3,[sp,#64+0] mov r10,r8,lsr#14 str r4,[sp,#64+4] eor r9,r9,r8,lsl#18 ldr r11,[sp,#56+0] @ h.lo eor r10,r10,r7,lsl#18 ldr r12,[sp,#56+4] @ h.hi eor r9,r9,r7,lsr#18 eor r10,r10,r8,lsr#18 eor r9,r9,r8,lsl#14 eor r10,r10,r7,lsl#14 eor r9,r9,r8,lsr#9 eor r10,r10,r7,lsr#9 eor r9,r9,r7,lsl#23 eor r10,r10,r8,lsl#23 @ Sigma1(e) adds r3,r3,r9 ldr r9,[sp,#40+0] @ f.lo adc r4,r4,r10 @ T += Sigma1(e) ldr r10,[sp,#40+4] @ f.hi adds r3,r3,r11 ldr r11,[sp,#48+0] @ g.lo adc r4,r4,r12 @ T += h ldr r12,[sp,#48+4] @ g.hi eor r9,r9,r11 str r7,[sp,#32+0] eor r10,r10,r12 str r8,[sp,#32+4] and r9,r9,r7 str r5,[sp,#0+0] and r10,r10,r8 str r6,[sp,#0+4] eor r9,r9,r11 ldr r11,[r14,#LO] @ K[i].lo eor r10,r10,r12 @ Ch(e,f,g) ldr r12,[r14,#HI] @ K[i].hi adds r3,r3,r9 ldr r7,[sp,#24+0] @ d.lo adc r4,r4,r10 @ T += Ch(e,f,g) ldr r8,[sp,#24+4] @ d.hi adds r3,r3,r11 and r9,r11,#0xff adc r4,r4,r12 @ T += K[i] adds r7,r7,r3 ldr r11,[sp,#8+0] @ b.lo adc r8,r8,r4 @ d += T teq r9,#148 ldr r12,[sp,#16+0] @ c.lo #if __ARM_ARCH>=7 it eq @ Thumb2 thing, sanity check in ARM #endif orreq r14,r14,#1 @ Sigma0(x) (ROTR((x),28) ^ ROTR((x),34) ^ ROTR((x),39)) @ LO lo>>28^hi<<4 ^ hi>>2^lo<<30 ^ hi>>7^lo<<25 @ HI hi>>28^lo<<4 ^ lo>>2^hi<<30 ^ lo>>7^hi<<25 mov r9,r5,lsr#28 mov r10,r6,lsr#28 eor r9,r9,r6,lsl#4 eor r10,r10,r5,lsl#4 eor r9,r9,r6,lsr#2 eor r10,r10,r5,lsr#2 eor r9,r9,r5,lsl#30 eor r10,r10,r6,lsl#30 eor r9,r9,r6,lsr#7 eor r10,r10,r5,lsr#7 eor r9,r9,r5,lsl#25 eor r10,r10,r6,lsl#25 @ Sigma0(a) adds r3,r3,r9 and r9,r5,r11 adc r4,r4,r10 @ T += Sigma0(a) ldr r10,[sp,#8+4] @ b.hi orr r5,r5,r11 ldr r11,[sp,#16+4] @ c.hi and r5,r5,r12 and r12,r6,r10 orr r6,r6,r10 orr r5,r5,r9 @ Maj(a,b,c).lo and r6,r6,r11 adds r5,r5,r3 orr r6,r6,r12 @ Maj(a,b,c).hi sub sp,sp,#8 adc r6,r6,r4 @ h += T tst r14,#1 add r14,r14,#8 tst r14,#1 beq .L00_15 ldr r9,[sp,#184+0] ldr r10,[sp,#184+4] bic r14,r14,#1 .L16_79: @ sigma0(x) (ROTR((x),1) ^ ROTR((x),8) ^ ((x)>>7)) @ LO lo>>1^hi<<31 ^ lo>>8^hi<<24 ^ lo>>7^hi<<25 @ HI hi>>1^lo<<31 ^ hi>>8^lo<<24 ^ hi>>7 mov r3,r9,lsr#1 ldr r11,[sp,#80+0] mov r4,r10,lsr#1 ldr r12,[sp,#80+4] eor r3,r3,r10,lsl#31 eor r4,r4,r9,lsl#31 eor r3,r3,r9,lsr#8 eor r4,r4,r10,lsr#8 eor r3,r3,r10,lsl#24 eor r4,r4,r9,lsl#24 eor r3,r3,r9,lsr#7 eor r4,r4,r10,lsr#7 eor r3,r3,r10,lsl#25 @ sigma1(x) (ROTR((x),19) ^ ROTR((x),61) ^ ((x)>>6)) @ LO lo>>19^hi<<13 ^ hi>>29^lo<<3 ^ lo>>6^hi<<26 @ HI hi>>19^lo<<13 ^ lo>>29^hi<<3 ^ hi>>6 mov r9,r11,lsr#19 mov r10,r12,lsr#19 eor r9,r9,r12,lsl#13 eor r10,r10,r11,lsl#13 eor r9,r9,r12,lsr#29 eor r10,r10,r11,lsr#29 eor r9,r9,r11,lsl#3 eor r10,r10,r12,lsl#3 eor r9,r9,r11,lsr#6 eor r10,r10,r12,lsr#6 ldr r11,[sp,#120+0] eor r9,r9,r12,lsl#26 ldr r12,[sp,#120+4] adds r3,r3,r9 ldr r9,[sp,#192+0] adc r4,r4,r10 ldr r10,[sp,#192+4] adds r3,r3,r11 adc r4,r4,r12 adds r3,r3,r9 adc r4,r4,r10 @ Sigma1(x) (ROTR((x),14) ^ ROTR((x),18) ^ ROTR((x),41)) @ LO lo>>14^hi<<18 ^ lo>>18^hi<<14 ^ hi>>9^lo<<23 @ HI hi>>14^lo<<18 ^ hi>>18^lo<<14 ^ lo>>9^hi<<23 mov r9,r7,lsr#14 str r3,[sp,#64+0] mov r10,r8,lsr#14 str r4,[sp,#64+4] eor r9,r9,r8,lsl#18 ldr r11,[sp,#56+0] @ h.lo eor r10,r10,r7,lsl#18 ldr r12,[sp,#56+4] @ h.hi eor r9,r9,r7,lsr#18 eor r10,r10,r8,lsr#18 eor r9,r9,r8,lsl#14 eor r10,r10,r7,lsl#14 eor r9,r9,r8,lsr#9 eor r10,r10,r7,lsr#9 
eor r9,r9,r7,lsl#23 eor r10,r10,r8,lsl#23 @ Sigma1(e) adds r3,r3,r9 ldr r9,[sp,#40+0] @ f.lo adc r4,r4,r10 @ T += Sigma1(e) ldr r10,[sp,#40+4] @ f.hi adds r3,r3,r11 ldr r11,[sp,#48+0] @ g.lo adc r4,r4,r12 @ T += h ldr r12,[sp,#48+4] @ g.hi eor r9,r9,r11 str r7,[sp,#32+0] eor r10,r10,r12 str r8,[sp,#32+4] and r9,r9,r7 str r5,[sp,#0+0] and r10,r10,r8 str r6,[sp,#0+4] eor r9,r9,r11 ldr r11,[r14,#LO] @ K[i].lo eor r10,r10,r12 @ Ch(e,f,g) ldr r12,[r14,#HI] @ K[i].hi adds r3,r3,r9 ldr r7,[sp,#24+0] @ d.lo adc r4,r4,r10 @ T += Ch(e,f,g) ldr r8,[sp,#24+4] @ d.hi adds r3,r3,r11 and r9,r11,#0xff adc r4,r4,r12 @ T += K[i] adds r7,r7,r3 ldr r11,[sp,#8+0] @ b.lo adc r8,r8,r4 @ d += T teq r9,#23 ldr r12,[sp,#16+0] @ c.lo #if __ARM_ARCH>=7 it eq @ Thumb2 thing, sanity check in ARM #endif orreq r14,r14,#1 @ Sigma0(x) (ROTR((x),28) ^ ROTR((x),34) ^ ROTR((x),39)) @ LO lo>>28^hi<<4 ^ hi>>2^lo<<30 ^ hi>>7^lo<<25 @ HI hi>>28^lo<<4 ^ lo>>2^hi<<30 ^ lo>>7^hi<<25 mov r9,r5,lsr#28 mov r10,r6,lsr#28 eor r9,r9,r6,lsl#4 eor r10,r10,r5,lsl#4 eor r9,r9,r6,lsr#2 eor r10,r10,r5,lsr#2 eor r9,r9,r5,lsl#30 eor r10,r10,r6,lsl#30 eor r9,r9,r6,lsr#7 eor r10,r10,r5,lsr#7 eor r9,r9,r5,lsl#25 eor r10,r10,r6,lsl#25 @ Sigma0(a) adds r3,r3,r9 and r9,r5,r11 adc r4,r4,r10 @ T += Sigma0(a) ldr r10,[sp,#8+4] @ b.hi orr r5,r5,r11 ldr r11,[sp,#16+4] @ c.hi and r5,r5,r12 and r12,r6,r10 orr r6,r6,r10 orr r5,r5,r9 @ Maj(a,b,c).lo and r6,r6,r11 adds r5,r5,r3 orr r6,r6,r12 @ Maj(a,b,c).hi sub sp,sp,#8 adc r6,r6,r4 @ h += T tst r14,#1 add r14,r14,#8 #if __ARM_ARCH>=7 ittt eq @ Thumb2 thing, sanity check in ARM #endif ldreq r9,[sp,#184+0] ldreq r10,[sp,#184+4] beq .L16_79 bic r14,r14,#1 ldr r3,[sp,#8+0] ldr r4,[sp,#8+4] ldr r9, [r0,#0+LO] ldr r10, [r0,#0+HI] ldr r11, [r0,#8+LO] ldr r12, [r0,#8+HI] adds r9,r5,r9 str r9, [r0,#0+LO] adc r10,r6,r10 str r10, [r0,#0+HI] adds r11,r3,r11 str r11, [r0,#8+LO] adc r12,r4,r12 str r12, [r0,#8+HI] ldr r5,[sp,#16+0] ldr r6,[sp,#16+4] ldr r3,[sp,#24+0] ldr r4,[sp,#24+4] ldr r9, [r0,#16+LO] ldr r10, [r0,#16+HI] ldr r11, [r0,#24+LO] ldr r12, [r0,#24+HI] adds r9,r5,r9 str r9, [r0,#16+LO] adc r10,r6,r10 str r10, [r0,#16+HI] adds r11,r3,r11 str r11, [r0,#24+LO] adc r12,r4,r12 str r12, [r0,#24+HI] ldr r3,[sp,#40+0] ldr r4,[sp,#40+4] ldr r9, [r0,#32+LO] ldr r10, [r0,#32+HI] ldr r11, [r0,#40+LO] ldr r12, [r0,#40+HI] adds r7,r7,r9 str r7,[r0,#32+LO] adc r8,r8,r10 str r8,[r0,#32+HI] adds r11,r3,r11 str r11, [r0,#40+LO] adc r12,r4,r12 str r12, [r0,#40+HI] ldr r5,[sp,#48+0] ldr r6,[sp,#48+4] ldr r3,[sp,#56+0] ldr r4,[sp,#56+4] ldr r9, [r0,#48+LO] ldr r10, [r0,#48+HI] ldr r11, [r0,#56+LO] ldr r12, [r0,#56+HI] adds r9,r5,r9 str r9, [r0,#48+LO] adc r10,r6,r10 str r10, [r0,#48+HI] adds r11,r3,r11 str r11, [r0,#56+LO] adc r12,r4,r12 str r12, [r0,#56+HI] add sp,sp,#640 sub r14,r14,#640 teq r1,r2 bne .Loop add sp,sp,#8*9 @ destroy frame #if __ARM_ARCH>=5 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} #else ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} tst lr,#1 moveq pc,lr @ be binary compatible with V4, yet .word 0xe12fff1e @ interoperable with Thumb ISA:-) #endif .size sha512_block_data_order_nohw,.-sha512_block_data_order_nohw #if __ARM_MAX_ARCH__>=7 .arch armv7-a .fpu neon .globl sha512_block_data_order_neon .hidden sha512_block_data_order_neon .type sha512_block_data_order_neon,%function .align 4 sha512_block_data_order_neon: dmb @ errata #451034 on early Cortex A8 add r2,r1,r2,lsl#7 @ len to point at the end of inp adr r3,K512 VFP_ABI_PUSH vldmia r0,{d16,d17,d18,d19,d20,d21,d22,d23} @ load context .Loop_neon: vshr.u64 
d24,d20,#14 @ 0 #if 0<16 vld1.64 {d0},[r1]! @ handles unaligned #endif vshr.u64 d25,d20,#18 #if 0>0 vadd.i64 d16,d30 @ h+=Maj from the past #endif vshr.u64 d26,d20,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d20,#50 vsli.64 d25,d20,#46 vmov d29,d20 vsli.64 d26,d20,#23 #if 0<16 && defined(__ARMEL__) vrev64.8 d0,d0 #endif veor d25,d24 vbsl d29,d21,d22 @ Ch(e,f,g) vshr.u64 d24,d16,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d23 vshr.u64 d25,d16,#34 vsli.64 d24,d16,#36 vadd.i64 d27,d26 vshr.u64 d26,d16,#39 vadd.i64 d28,d0 vsli.64 d25,d16,#30 veor d30,d16,d17 vsli.64 d26,d16,#25 veor d23,d24,d25 vadd.i64 d27,d28 vbsl d30,d18,d17 @ Maj(a,b,c) veor d23,d26 @ Sigma0(a) vadd.i64 d19,d27 vadd.i64 d30,d27 @ vadd.i64 d23,d30 vshr.u64 d24,d19,#14 @ 1 #if 1<16 vld1.64 {d1},[r1]! @ handles unaligned #endif vshr.u64 d25,d19,#18 #if 1>0 vadd.i64 d23,d30 @ h+=Maj from the past #endif vshr.u64 d26,d19,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d19,#50 vsli.64 d25,d19,#46 vmov d29,d19 vsli.64 d26,d19,#23 #if 1<16 && defined(__ARMEL__) vrev64.8 d1,d1 #endif veor d25,d24 vbsl d29,d20,d21 @ Ch(e,f,g) vshr.u64 d24,d23,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d22 vshr.u64 d25,d23,#34 vsli.64 d24,d23,#36 vadd.i64 d27,d26 vshr.u64 d26,d23,#39 vadd.i64 d28,d1 vsli.64 d25,d23,#30 veor d30,d23,d16 vsli.64 d26,d23,#25 veor d22,d24,d25 vadd.i64 d27,d28 vbsl d30,d17,d16 @ Maj(a,b,c) veor d22,d26 @ Sigma0(a) vadd.i64 d18,d27 vadd.i64 d30,d27 @ vadd.i64 d22,d30 vshr.u64 d24,d18,#14 @ 2 #if 2<16 vld1.64 {d2},[r1]! @ handles unaligned #endif vshr.u64 d25,d18,#18 #if 2>0 vadd.i64 d22,d30 @ h+=Maj from the past #endif vshr.u64 d26,d18,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d18,#50 vsli.64 d25,d18,#46 vmov d29,d18 vsli.64 d26,d18,#23 #if 2<16 && defined(__ARMEL__) vrev64.8 d2,d2 #endif veor d25,d24 vbsl d29,d19,d20 @ Ch(e,f,g) vshr.u64 d24,d22,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d21 vshr.u64 d25,d22,#34 vsli.64 d24,d22,#36 vadd.i64 d27,d26 vshr.u64 d26,d22,#39 vadd.i64 d28,d2 vsli.64 d25,d22,#30 veor d30,d22,d23 vsli.64 d26,d22,#25 veor d21,d24,d25 vadd.i64 d27,d28 vbsl d30,d16,d23 @ Maj(a,b,c) veor d21,d26 @ Sigma0(a) vadd.i64 d17,d27 vadd.i64 d30,d27 @ vadd.i64 d21,d30 vshr.u64 d24,d17,#14 @ 3 #if 3<16 vld1.64 {d3},[r1]! @ handles unaligned #endif vshr.u64 d25,d17,#18 #if 3>0 vadd.i64 d21,d30 @ h+=Maj from the past #endif vshr.u64 d26,d17,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d17,#50 vsli.64 d25,d17,#46 vmov d29,d17 vsli.64 d26,d17,#23 #if 3<16 && defined(__ARMEL__) vrev64.8 d3,d3 #endif veor d25,d24 vbsl d29,d18,d19 @ Ch(e,f,g) vshr.u64 d24,d21,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d20 vshr.u64 d25,d21,#34 vsli.64 d24,d21,#36 vadd.i64 d27,d26 vshr.u64 d26,d21,#39 vadd.i64 d28,d3 vsli.64 d25,d21,#30 veor d30,d21,d22 vsli.64 d26,d21,#25 veor d20,d24,d25 vadd.i64 d27,d28 vbsl d30,d23,d22 @ Maj(a,b,c) veor d20,d26 @ Sigma0(a) vadd.i64 d16,d27 vadd.i64 d30,d27 @ vadd.i64 d20,d30 vshr.u64 d24,d16,#14 @ 4 #if 4<16 vld1.64 {d4},[r1]! @ handles unaligned #endif vshr.u64 d25,d16,#18 #if 4>0 vadd.i64 d20,d30 @ h+=Maj from the past #endif vshr.u64 d26,d16,#41 vld1.64 {d28},[r3,:64]! 
@ K[i++] vsli.64 d24,d16,#50 vsli.64 d25,d16,#46 vmov d29,d16 vsli.64 d26,d16,#23 #if 4<16 && defined(__ARMEL__) vrev64.8 d4,d4 #endif veor d25,d24 vbsl d29,d17,d18 @ Ch(e,f,g) vshr.u64 d24,d20,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d19 vshr.u64 d25,d20,#34 vsli.64 d24,d20,#36 vadd.i64 d27,d26 vshr.u64 d26,d20,#39 vadd.i64 d28,d4 vsli.64 d25,d20,#30 veor d30,d20,d21 vsli.64 d26,d20,#25 veor d19,d24,d25 vadd.i64 d27,d28 vbsl d30,d22,d21 @ Maj(a,b,c) veor d19,d26 @ Sigma0(a) vadd.i64 d23,d27 vadd.i64 d30,d27 @ vadd.i64 d19,d30 vshr.u64 d24,d23,#14 @ 5 #if 5<16 vld1.64 {d5},[r1]! @ handles unaligned #endif vshr.u64 d25,d23,#18 #if 5>0 vadd.i64 d19,d30 @ h+=Maj from the past #endif vshr.u64 d26,d23,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d23,#50 vsli.64 d25,d23,#46 vmov d29,d23 vsli.64 d26,d23,#23 #if 5<16 && defined(__ARMEL__) vrev64.8 d5,d5 #endif veor d25,d24 vbsl d29,d16,d17 @ Ch(e,f,g) vshr.u64 d24,d19,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d18 vshr.u64 d25,d19,#34 vsli.64 d24,d19,#36 vadd.i64 d27,d26 vshr.u64 d26,d19,#39 vadd.i64 d28,d5 vsli.64 d25,d19,#30 veor d30,d19,d20 vsli.64 d26,d19,#25 veor d18,d24,d25 vadd.i64 d27,d28 vbsl d30,d21,d20 @ Maj(a,b,c) veor d18,d26 @ Sigma0(a) vadd.i64 d22,d27 vadd.i64 d30,d27 @ vadd.i64 d18,d30 vshr.u64 d24,d22,#14 @ 6 #if 6<16 vld1.64 {d6},[r1]! @ handles unaligned #endif vshr.u64 d25,d22,#18 #if 6>0 vadd.i64 d18,d30 @ h+=Maj from the past #endif vshr.u64 d26,d22,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d22,#50 vsli.64 d25,d22,#46 vmov d29,d22 vsli.64 d26,d22,#23 #if 6<16 && defined(__ARMEL__) vrev64.8 d6,d6 #endif veor d25,d24 vbsl d29,d23,d16 @ Ch(e,f,g) vshr.u64 d24,d18,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d17 vshr.u64 d25,d18,#34 vsli.64 d24,d18,#36 vadd.i64 d27,d26 vshr.u64 d26,d18,#39 vadd.i64 d28,d6 vsli.64 d25,d18,#30 veor d30,d18,d19 vsli.64 d26,d18,#25 veor d17,d24,d25 vadd.i64 d27,d28 vbsl d30,d20,d19 @ Maj(a,b,c) veor d17,d26 @ Sigma0(a) vadd.i64 d21,d27 vadd.i64 d30,d27 @ vadd.i64 d17,d30 vshr.u64 d24,d21,#14 @ 7 #if 7<16 vld1.64 {d7},[r1]! @ handles unaligned #endif vshr.u64 d25,d21,#18 #if 7>0 vadd.i64 d17,d30 @ h+=Maj from the past #endif vshr.u64 d26,d21,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d21,#50 vsli.64 d25,d21,#46 vmov d29,d21 vsli.64 d26,d21,#23 #if 7<16 && defined(__ARMEL__) vrev64.8 d7,d7 #endif veor d25,d24 vbsl d29,d22,d23 @ Ch(e,f,g) vshr.u64 d24,d17,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d16 vshr.u64 d25,d17,#34 vsli.64 d24,d17,#36 vadd.i64 d27,d26 vshr.u64 d26,d17,#39 vadd.i64 d28,d7 vsli.64 d25,d17,#30 veor d30,d17,d18 vsli.64 d26,d17,#25 veor d16,d24,d25 vadd.i64 d27,d28 vbsl d30,d19,d18 @ Maj(a,b,c) veor d16,d26 @ Sigma0(a) vadd.i64 d20,d27 vadd.i64 d30,d27 @ vadd.i64 d16,d30 vshr.u64 d24,d20,#14 @ 8 #if 8<16 vld1.64 {d8},[r1]! @ handles unaligned #endif vshr.u64 d25,d20,#18 #if 8>0 vadd.i64 d16,d30 @ h+=Maj from the past #endif vshr.u64 d26,d20,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d20,#50 vsli.64 d25,d20,#46 vmov d29,d20 vsli.64 d26,d20,#23 #if 8<16 && defined(__ARMEL__) vrev64.8 d8,d8 #endif veor d25,d24 vbsl d29,d21,d22 @ Ch(e,f,g) vshr.u64 d24,d16,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d23 vshr.u64 d25,d16,#34 vsli.64 d24,d16,#36 vadd.i64 d27,d26 vshr.u64 d26,d16,#39 vadd.i64 d28,d8 vsli.64 d25,d16,#30 veor d30,d16,d17 vsli.64 d26,d16,#25 veor d23,d24,d25 vadd.i64 d27,d28 vbsl d30,d18,d17 @ Maj(a,b,c) veor d23,d26 @ Sigma0(a) vadd.i64 d19,d27 vadd.i64 d30,d27 @ vadd.i64 d23,d30 vshr.u64 d24,d19,#14 @ 9 #if 9<16 vld1.64 {d9},[r1]! 
@ handles unaligned #endif vshr.u64 d25,d19,#18 #if 9>0 vadd.i64 d23,d30 @ h+=Maj from the past #endif vshr.u64 d26,d19,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d19,#50 vsli.64 d25,d19,#46 vmov d29,d19 vsli.64 d26,d19,#23 #if 9<16 && defined(__ARMEL__) vrev64.8 d9,d9 #endif veor d25,d24 vbsl d29,d20,d21 @ Ch(e,f,g) vshr.u64 d24,d23,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d22 vshr.u64 d25,d23,#34 vsli.64 d24,d23,#36 vadd.i64 d27,d26 vshr.u64 d26,d23,#39 vadd.i64 d28,d9 vsli.64 d25,d23,#30 veor d30,d23,d16 vsli.64 d26,d23,#25 veor d22,d24,d25 vadd.i64 d27,d28 vbsl d30,d17,d16 @ Maj(a,b,c) veor d22,d26 @ Sigma0(a) vadd.i64 d18,d27 vadd.i64 d30,d27 @ vadd.i64 d22,d30 vshr.u64 d24,d18,#14 @ 10 #if 10<16 vld1.64 {d10},[r1]! @ handles unaligned #endif vshr.u64 d25,d18,#18 #if 10>0 vadd.i64 d22,d30 @ h+=Maj from the past #endif vshr.u64 d26,d18,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d18,#50 vsli.64 d25,d18,#46 vmov d29,d18 vsli.64 d26,d18,#23 #if 10<16 && defined(__ARMEL__) vrev64.8 d10,d10 #endif veor d25,d24 vbsl d29,d19,d20 @ Ch(e,f,g) vshr.u64 d24,d22,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d21 vshr.u64 d25,d22,#34 vsli.64 d24,d22,#36 vadd.i64 d27,d26 vshr.u64 d26,d22,#39 vadd.i64 d28,d10 vsli.64 d25,d22,#30 veor d30,d22,d23 vsli.64 d26,d22,#25 veor d21,d24,d25 vadd.i64 d27,d28 vbsl d30,d16,d23 @ Maj(a,b,c) veor d21,d26 @ Sigma0(a) vadd.i64 d17,d27 vadd.i64 d30,d27 @ vadd.i64 d21,d30 vshr.u64 d24,d17,#14 @ 11 #if 11<16 vld1.64 {d11},[r1]! @ handles unaligned #endif vshr.u64 d25,d17,#18 #if 11>0 vadd.i64 d21,d30 @ h+=Maj from the past #endif vshr.u64 d26,d17,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d17,#50 vsli.64 d25,d17,#46 vmov d29,d17 vsli.64 d26,d17,#23 #if 11<16 && defined(__ARMEL__) vrev64.8 d11,d11 #endif veor d25,d24 vbsl d29,d18,d19 @ Ch(e,f,g) vshr.u64 d24,d21,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d20 vshr.u64 d25,d21,#34 vsli.64 d24,d21,#36 vadd.i64 d27,d26 vshr.u64 d26,d21,#39 vadd.i64 d28,d11 vsli.64 d25,d21,#30 veor d30,d21,d22 vsli.64 d26,d21,#25 veor d20,d24,d25 vadd.i64 d27,d28 vbsl d30,d23,d22 @ Maj(a,b,c) veor d20,d26 @ Sigma0(a) vadd.i64 d16,d27 vadd.i64 d30,d27 @ vadd.i64 d20,d30 vshr.u64 d24,d16,#14 @ 12 #if 12<16 vld1.64 {d12},[r1]! @ handles unaligned #endif vshr.u64 d25,d16,#18 #if 12>0 vadd.i64 d20,d30 @ h+=Maj from the past #endif vshr.u64 d26,d16,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d16,#50 vsli.64 d25,d16,#46 vmov d29,d16 vsli.64 d26,d16,#23 #if 12<16 && defined(__ARMEL__) vrev64.8 d12,d12 #endif veor d25,d24 vbsl d29,d17,d18 @ Ch(e,f,g) vshr.u64 d24,d20,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d19 vshr.u64 d25,d20,#34 vsli.64 d24,d20,#36 vadd.i64 d27,d26 vshr.u64 d26,d20,#39 vadd.i64 d28,d12 vsli.64 d25,d20,#30 veor d30,d20,d21 vsli.64 d26,d20,#25 veor d19,d24,d25 vadd.i64 d27,d28 vbsl d30,d22,d21 @ Maj(a,b,c) veor d19,d26 @ Sigma0(a) vadd.i64 d23,d27 vadd.i64 d30,d27 @ vadd.i64 d19,d30 vshr.u64 d24,d23,#14 @ 13 #if 13<16 vld1.64 {d13},[r1]! @ handles unaligned #endif vshr.u64 d25,d23,#18 #if 13>0 vadd.i64 d19,d30 @ h+=Maj from the past #endif vshr.u64 d26,d23,#41 vld1.64 {d28},[r3,:64]! 
@ K[i++] vsli.64 d24,d23,#50 vsli.64 d25,d23,#46 vmov d29,d23 vsli.64 d26,d23,#23 #if 13<16 && defined(__ARMEL__) vrev64.8 d13,d13 #endif veor d25,d24 vbsl d29,d16,d17 @ Ch(e,f,g) vshr.u64 d24,d19,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d18 vshr.u64 d25,d19,#34 vsli.64 d24,d19,#36 vadd.i64 d27,d26 vshr.u64 d26,d19,#39 vadd.i64 d28,d13 vsli.64 d25,d19,#30 veor d30,d19,d20 vsli.64 d26,d19,#25 veor d18,d24,d25 vadd.i64 d27,d28 vbsl d30,d21,d20 @ Maj(a,b,c) veor d18,d26 @ Sigma0(a) vadd.i64 d22,d27 vadd.i64 d30,d27 @ vadd.i64 d18,d30 vshr.u64 d24,d22,#14 @ 14 #if 14<16 vld1.64 {d14},[r1]! @ handles unaligned #endif vshr.u64 d25,d22,#18 #if 14>0 vadd.i64 d18,d30 @ h+=Maj from the past #endif vshr.u64 d26,d22,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d22,#50 vsli.64 d25,d22,#46 vmov d29,d22 vsli.64 d26,d22,#23 #if 14<16 && defined(__ARMEL__) vrev64.8 d14,d14 #endif veor d25,d24 vbsl d29,d23,d16 @ Ch(e,f,g) vshr.u64 d24,d18,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d17 vshr.u64 d25,d18,#34 vsli.64 d24,d18,#36 vadd.i64 d27,d26 vshr.u64 d26,d18,#39 vadd.i64 d28,d14 vsli.64 d25,d18,#30 veor d30,d18,d19 vsli.64 d26,d18,#25 veor d17,d24,d25 vadd.i64 d27,d28 vbsl d30,d20,d19 @ Maj(a,b,c) veor d17,d26 @ Sigma0(a) vadd.i64 d21,d27 vadd.i64 d30,d27 @ vadd.i64 d17,d30 vshr.u64 d24,d21,#14 @ 15 #if 15<16 vld1.64 {d15},[r1]! @ handles unaligned #endif vshr.u64 d25,d21,#18 #if 15>0 vadd.i64 d17,d30 @ h+=Maj from the past #endif vshr.u64 d26,d21,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d21,#50 vsli.64 d25,d21,#46 vmov d29,d21 vsli.64 d26,d21,#23 #if 15<16 && defined(__ARMEL__) vrev64.8 d15,d15 #endif veor d25,d24 vbsl d29,d22,d23 @ Ch(e,f,g) vshr.u64 d24,d17,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d16 vshr.u64 d25,d17,#34 vsli.64 d24,d17,#36 vadd.i64 d27,d26 vshr.u64 d26,d17,#39 vadd.i64 d28,d15 vsli.64 d25,d17,#30 veor d30,d17,d18 vsli.64 d26,d17,#25 veor d16,d24,d25 vadd.i64 d27,d28 vbsl d30,d19,d18 @ Maj(a,b,c) veor d16,d26 @ Sigma0(a) vadd.i64 d20,d27 vadd.i64 d30,d27 @ vadd.i64 d16,d30 mov r12,#4 .L16_79_neon: subs r12,#1 vshr.u64 q12,q7,#19 vshr.u64 q13,q7,#61 vadd.i64 d16,d30 @ h+=Maj from the past vshr.u64 q15,q7,#6 vsli.64 q12,q7,#45 vext.8 q14,q0,q1,#8 @ X[i+1] vsli.64 q13,q7,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q0,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q4,q5,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d20,#14 @ from NEON_00_15 vadd.i64 q0,q14 vshr.u64 d25,d20,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d20,#41 @ from NEON_00_15 vadd.i64 q0,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d20,#50 vsli.64 d25,d20,#46 vmov d29,d20 vsli.64 d26,d20,#23 #if 16<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d21,d22 @ Ch(e,f,g) vshr.u64 d24,d16,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d23 vshr.u64 d25,d16,#34 vsli.64 d24,d16,#36 vadd.i64 d27,d26 vshr.u64 d26,d16,#39 vadd.i64 d28,d0 vsli.64 d25,d16,#30 veor d30,d16,d17 vsli.64 d26,d16,#25 veor d23,d24,d25 vadd.i64 d27,d28 vbsl d30,d18,d17 @ Maj(a,b,c) veor d23,d26 @ Sigma0(a) vadd.i64 d19,d27 vadd.i64 d30,d27 @ vadd.i64 d23,d30 vshr.u64 d24,d19,#14 @ 17 #if 17<16 vld1.64 {d1},[r1]! @ handles unaligned #endif vshr.u64 d25,d19,#18 #if 17>0 vadd.i64 d23,d30 @ h+=Maj from the past #endif vshr.u64 d26,d19,#41 vld1.64 {d28},[r3,:64]! 
@ K[i++] vsli.64 d24,d19,#50 vsli.64 d25,d19,#46 vmov d29,d19 vsli.64 d26,d19,#23 #if 17<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d20,d21 @ Ch(e,f,g) vshr.u64 d24,d23,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d22 vshr.u64 d25,d23,#34 vsli.64 d24,d23,#36 vadd.i64 d27,d26 vshr.u64 d26,d23,#39 vadd.i64 d28,d1 vsli.64 d25,d23,#30 veor d30,d23,d16 vsli.64 d26,d23,#25 veor d22,d24,d25 vadd.i64 d27,d28 vbsl d30,d17,d16 @ Maj(a,b,c) veor d22,d26 @ Sigma0(a) vadd.i64 d18,d27 vadd.i64 d30,d27 @ vadd.i64 d22,d30 vshr.u64 q12,q0,#19 vshr.u64 q13,q0,#61 vadd.i64 d22,d30 @ h+=Maj from the past vshr.u64 q15,q0,#6 vsli.64 q12,q0,#45 vext.8 q14,q1,q2,#8 @ X[i+1] vsli.64 q13,q0,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q1,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q5,q6,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d18,#14 @ from NEON_00_15 vadd.i64 q1,q14 vshr.u64 d25,d18,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d18,#41 @ from NEON_00_15 vadd.i64 q1,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d18,#50 vsli.64 d25,d18,#46 vmov d29,d18 vsli.64 d26,d18,#23 #if 18<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d19,d20 @ Ch(e,f,g) vshr.u64 d24,d22,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d21 vshr.u64 d25,d22,#34 vsli.64 d24,d22,#36 vadd.i64 d27,d26 vshr.u64 d26,d22,#39 vadd.i64 d28,d2 vsli.64 d25,d22,#30 veor d30,d22,d23 vsli.64 d26,d22,#25 veor d21,d24,d25 vadd.i64 d27,d28 vbsl d30,d16,d23 @ Maj(a,b,c) veor d21,d26 @ Sigma0(a) vadd.i64 d17,d27 vadd.i64 d30,d27 @ vadd.i64 d21,d30 vshr.u64 d24,d17,#14 @ 19 #if 19<16 vld1.64 {d3},[r1]! @ handles unaligned #endif vshr.u64 d25,d17,#18 #if 19>0 vadd.i64 d21,d30 @ h+=Maj from the past #endif vshr.u64 d26,d17,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d17,#50 vsli.64 d25,d17,#46 vmov d29,d17 vsli.64 d26,d17,#23 #if 19<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d18,d19 @ Ch(e,f,g) vshr.u64 d24,d21,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d20 vshr.u64 d25,d21,#34 vsli.64 d24,d21,#36 vadd.i64 d27,d26 vshr.u64 d26,d21,#39 vadd.i64 d28,d3 vsli.64 d25,d21,#30 veor d30,d21,d22 vsli.64 d26,d21,#25 veor d20,d24,d25 vadd.i64 d27,d28 vbsl d30,d23,d22 @ Maj(a,b,c) veor d20,d26 @ Sigma0(a) vadd.i64 d16,d27 vadd.i64 d30,d27 @ vadd.i64 d20,d30 vshr.u64 q12,q1,#19 vshr.u64 q13,q1,#61 vadd.i64 d20,d30 @ h+=Maj from the past vshr.u64 q15,q1,#6 vsli.64 q12,q1,#45 vext.8 q14,q2,q3,#8 @ X[i+1] vsli.64 q13,q1,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q2,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q6,q7,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d16,#14 @ from NEON_00_15 vadd.i64 q2,q14 vshr.u64 d25,d16,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d16,#41 @ from NEON_00_15 vadd.i64 q2,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d16,#50 vsli.64 d25,d16,#46 vmov d29,d16 vsli.64 d26,d16,#23 #if 20<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d17,d18 @ Ch(e,f,g) vshr.u64 d24,d20,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d19 vshr.u64 d25,d20,#34 vsli.64 d24,d20,#36 vadd.i64 d27,d26 vshr.u64 d26,d20,#39 vadd.i64 d28,d4 vsli.64 d25,d20,#30 veor d30,d20,d21 vsli.64 d26,d20,#25 veor d19,d24,d25 vadd.i64 d27,d28 vbsl d30,d22,d21 @ Maj(a,b,c) veor d19,d26 @ Sigma0(a) vadd.i64 d23,d27 vadd.i64 d30,d27 @ vadd.i64 d19,d30 vshr.u64 d24,d23,#14 @ 21 #if 21<16 vld1.64 {d5},[r1]! 
@ handles unaligned #endif vshr.u64 d25,d23,#18 #if 21>0 vadd.i64 d19,d30 @ h+=Maj from the past #endif vshr.u64 d26,d23,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d23,#50 vsli.64 d25,d23,#46 vmov d29,d23 vsli.64 d26,d23,#23 #if 21<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d16,d17 @ Ch(e,f,g) vshr.u64 d24,d19,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d18 vshr.u64 d25,d19,#34 vsli.64 d24,d19,#36 vadd.i64 d27,d26 vshr.u64 d26,d19,#39 vadd.i64 d28,d5 vsli.64 d25,d19,#30 veor d30,d19,d20 vsli.64 d26,d19,#25 veor d18,d24,d25 vadd.i64 d27,d28 vbsl d30,d21,d20 @ Maj(a,b,c) veor d18,d26 @ Sigma0(a) vadd.i64 d22,d27 vadd.i64 d30,d27 @ vadd.i64 d18,d30 vshr.u64 q12,q2,#19 vshr.u64 q13,q2,#61 vadd.i64 d18,d30 @ h+=Maj from the past vshr.u64 q15,q2,#6 vsli.64 q12,q2,#45 vext.8 q14,q3,q4,#8 @ X[i+1] vsli.64 q13,q2,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q3,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q7,q0,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d22,#14 @ from NEON_00_15 vadd.i64 q3,q14 vshr.u64 d25,d22,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d22,#41 @ from NEON_00_15 vadd.i64 q3,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d22,#50 vsli.64 d25,d22,#46 vmov d29,d22 vsli.64 d26,d22,#23 #if 22<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d23,d16 @ Ch(e,f,g) vshr.u64 d24,d18,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d17 vshr.u64 d25,d18,#34 vsli.64 d24,d18,#36 vadd.i64 d27,d26 vshr.u64 d26,d18,#39 vadd.i64 d28,d6 vsli.64 d25,d18,#30 veor d30,d18,d19 vsli.64 d26,d18,#25 veor d17,d24,d25 vadd.i64 d27,d28 vbsl d30,d20,d19 @ Maj(a,b,c) veor d17,d26 @ Sigma0(a) vadd.i64 d21,d27 vadd.i64 d30,d27 @ vadd.i64 d17,d30 vshr.u64 d24,d21,#14 @ 23 #if 23<16 vld1.64 {d7},[r1]! @ handles unaligned #endif vshr.u64 d25,d21,#18 #if 23>0 vadd.i64 d17,d30 @ h+=Maj from the past #endif vshr.u64 d26,d21,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d21,#50 vsli.64 d25,d21,#46 vmov d29,d21 vsli.64 d26,d21,#23 #if 23<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d22,d23 @ Ch(e,f,g) vshr.u64 d24,d17,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d16 vshr.u64 d25,d17,#34 vsli.64 d24,d17,#36 vadd.i64 d27,d26 vshr.u64 d26,d17,#39 vadd.i64 d28,d7 vsli.64 d25,d17,#30 veor d30,d17,d18 vsli.64 d26,d17,#25 veor d16,d24,d25 vadd.i64 d27,d28 vbsl d30,d19,d18 @ Maj(a,b,c) veor d16,d26 @ Sigma0(a) vadd.i64 d20,d27 vadd.i64 d30,d27 @ vadd.i64 d16,d30 vshr.u64 q12,q3,#19 vshr.u64 q13,q3,#61 vadd.i64 d16,d30 @ h+=Maj from the past vshr.u64 q15,q3,#6 vsli.64 q12,q3,#45 vext.8 q14,q4,q5,#8 @ X[i+1] vsli.64 q13,q3,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q4,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q0,q1,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d20,#14 @ from NEON_00_15 vadd.i64 q4,q14 vshr.u64 d25,d20,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d20,#41 @ from NEON_00_15 vadd.i64 q4,q15 vld1.64 {d28},[r3,:64]! 
@ K[i++] vsli.64 d24,d20,#50 vsli.64 d25,d20,#46 vmov d29,d20 vsli.64 d26,d20,#23 #if 24<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d21,d22 @ Ch(e,f,g) vshr.u64 d24,d16,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d23 vshr.u64 d25,d16,#34 vsli.64 d24,d16,#36 vadd.i64 d27,d26 vshr.u64 d26,d16,#39 vadd.i64 d28,d8 vsli.64 d25,d16,#30 veor d30,d16,d17 vsli.64 d26,d16,#25 veor d23,d24,d25 vadd.i64 d27,d28 vbsl d30,d18,d17 @ Maj(a,b,c) veor d23,d26 @ Sigma0(a) vadd.i64 d19,d27 vadd.i64 d30,d27 @ vadd.i64 d23,d30 vshr.u64 d24,d19,#14 @ 25 #if 25<16 vld1.64 {d9},[r1]! @ handles unaligned #endif vshr.u64 d25,d19,#18 #if 25>0 vadd.i64 d23,d30 @ h+=Maj from the past #endif vshr.u64 d26,d19,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d19,#50 vsli.64 d25,d19,#46 vmov d29,d19 vsli.64 d26,d19,#23 #if 25<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d20,d21 @ Ch(e,f,g) vshr.u64 d24,d23,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d22 vshr.u64 d25,d23,#34 vsli.64 d24,d23,#36 vadd.i64 d27,d26 vshr.u64 d26,d23,#39 vadd.i64 d28,d9 vsli.64 d25,d23,#30 veor d30,d23,d16 vsli.64 d26,d23,#25 veor d22,d24,d25 vadd.i64 d27,d28 vbsl d30,d17,d16 @ Maj(a,b,c) veor d22,d26 @ Sigma0(a) vadd.i64 d18,d27 vadd.i64 d30,d27 @ vadd.i64 d22,d30 vshr.u64 q12,q4,#19 vshr.u64 q13,q4,#61 vadd.i64 d22,d30 @ h+=Maj from the past vshr.u64 q15,q4,#6 vsli.64 q12,q4,#45 vext.8 q14,q5,q6,#8 @ X[i+1] vsli.64 q13,q4,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q5,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q1,q2,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d18,#14 @ from NEON_00_15 vadd.i64 q5,q14 vshr.u64 d25,d18,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d18,#41 @ from NEON_00_15 vadd.i64 q5,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d18,#50 vsli.64 d25,d18,#46 vmov d29,d18 vsli.64 d26,d18,#23 #if 26<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d19,d20 @ Ch(e,f,g) vshr.u64 d24,d22,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d21 vshr.u64 d25,d22,#34 vsli.64 d24,d22,#36 vadd.i64 d27,d26 vshr.u64 d26,d22,#39 vadd.i64 d28,d10 vsli.64 d25,d22,#30 veor d30,d22,d23 vsli.64 d26,d22,#25 veor d21,d24,d25 vadd.i64 d27,d28 vbsl d30,d16,d23 @ Maj(a,b,c) veor d21,d26 @ Sigma0(a) vadd.i64 d17,d27 vadd.i64 d30,d27 @ vadd.i64 d21,d30 vshr.u64 d24,d17,#14 @ 27 #if 27<16 vld1.64 {d11},[r1]! @ handles unaligned #endif vshr.u64 d25,d17,#18 #if 27>0 vadd.i64 d21,d30 @ h+=Maj from the past #endif vshr.u64 d26,d17,#41 vld1.64 {d28},[r3,:64]! 
@ K[i++] vsli.64 d24,d17,#50 vsli.64 d25,d17,#46 vmov d29,d17 vsli.64 d26,d17,#23 #if 27<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d18,d19 @ Ch(e,f,g) vshr.u64 d24,d21,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d20 vshr.u64 d25,d21,#34 vsli.64 d24,d21,#36 vadd.i64 d27,d26 vshr.u64 d26,d21,#39 vadd.i64 d28,d11 vsli.64 d25,d21,#30 veor d30,d21,d22 vsli.64 d26,d21,#25 veor d20,d24,d25 vadd.i64 d27,d28 vbsl d30,d23,d22 @ Maj(a,b,c) veor d20,d26 @ Sigma0(a) vadd.i64 d16,d27 vadd.i64 d30,d27 @ vadd.i64 d20,d30 vshr.u64 q12,q5,#19 vshr.u64 q13,q5,#61 vadd.i64 d20,d30 @ h+=Maj from the past vshr.u64 q15,q5,#6 vsli.64 q12,q5,#45 vext.8 q14,q6,q7,#8 @ X[i+1] vsli.64 q13,q5,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q6,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q2,q3,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d16,#14 @ from NEON_00_15 vadd.i64 q6,q14 vshr.u64 d25,d16,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d16,#41 @ from NEON_00_15 vadd.i64 q6,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d16,#50 vsli.64 d25,d16,#46 vmov d29,d16 vsli.64 d26,d16,#23 #if 28<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d17,d18 @ Ch(e,f,g) vshr.u64 d24,d20,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d19 vshr.u64 d25,d20,#34 vsli.64 d24,d20,#36 vadd.i64 d27,d26 vshr.u64 d26,d20,#39 vadd.i64 d28,d12 vsli.64 d25,d20,#30 veor d30,d20,d21 vsli.64 d26,d20,#25 veor d19,d24,d25 vadd.i64 d27,d28 vbsl d30,d22,d21 @ Maj(a,b,c) veor d19,d26 @ Sigma0(a) vadd.i64 d23,d27 vadd.i64 d30,d27 @ vadd.i64 d19,d30 vshr.u64 d24,d23,#14 @ 29 #if 29<16 vld1.64 {d13},[r1]! @ handles unaligned #endif vshr.u64 d25,d23,#18 #if 29>0 vadd.i64 d19,d30 @ h+=Maj from the past #endif vshr.u64 d26,d23,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d23,#50 vsli.64 d25,d23,#46 vmov d29,d23 vsli.64 d26,d23,#23 #if 29<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d16,d17 @ Ch(e,f,g) vshr.u64 d24,d19,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d18 vshr.u64 d25,d19,#34 vsli.64 d24,d19,#36 vadd.i64 d27,d26 vshr.u64 d26,d19,#39 vadd.i64 d28,d13 vsli.64 d25,d19,#30 veor d30,d19,d20 vsli.64 d26,d19,#25 veor d18,d24,d25 vadd.i64 d27,d28 vbsl d30,d21,d20 @ Maj(a,b,c) veor d18,d26 @ Sigma0(a) vadd.i64 d22,d27 vadd.i64 d30,d27 @ vadd.i64 d18,d30 vshr.u64 q12,q6,#19 vshr.u64 q13,q6,#61 vadd.i64 d18,d30 @ h+=Maj from the past vshr.u64 q15,q6,#6 vsli.64 q12,q6,#45 vext.8 q14,q7,q0,#8 @ X[i+1] vsli.64 q13,q6,#3 veor q15,q12 vshr.u64 q12,q14,#1 veor q15,q13 @ sigma1(X[i+14]) vshr.u64 q13,q14,#8 vadd.i64 q7,q15 vshr.u64 q15,q14,#7 vsli.64 q12,q14,#63 vsli.64 q13,q14,#56 vext.8 q14,q3,q4,#8 @ X[i+9] veor q15,q12 vshr.u64 d24,d22,#14 @ from NEON_00_15 vadd.i64 q7,q14 vshr.u64 d25,d22,#18 @ from NEON_00_15 veor q15,q13 @ sigma0(X[i+1]) vshr.u64 d26,d22,#41 @ from NEON_00_15 vadd.i64 q7,q15 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d22,#50 vsli.64 d25,d22,#46 vmov d29,d22 vsli.64 d26,d22,#23 #if 30<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d23,d16 @ Ch(e,f,g) vshr.u64 d24,d18,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d17 vshr.u64 d25,d18,#34 vsli.64 d24,d18,#36 vadd.i64 d27,d26 vshr.u64 d26,d18,#39 vadd.i64 d28,d14 vsli.64 d25,d18,#30 veor d30,d18,d19 vsli.64 d26,d18,#25 veor d17,d24,d25 vadd.i64 d27,d28 vbsl d30,d20,d19 @ Maj(a,b,c) veor d17,d26 @ Sigma0(a) vadd.i64 d21,d27 vadd.i64 d30,d27 @ vadd.i64 d17,d30 vshr.u64 d24,d21,#14 @ 31 #if 31<16 vld1.64 {d15},[r1]! 
@ handles unaligned #endif vshr.u64 d25,d21,#18 #if 31>0 vadd.i64 d17,d30 @ h+=Maj from the past #endif vshr.u64 d26,d21,#41 vld1.64 {d28},[r3,:64]! @ K[i++] vsli.64 d24,d21,#50 vsli.64 d25,d21,#46 vmov d29,d21 vsli.64 d26,d21,#23 #if 31<16 && defined(__ARMEL__) vrev64.8 , #endif veor d25,d24 vbsl d29,d22,d23 @ Ch(e,f,g) vshr.u64 d24,d17,#28 veor d26,d25 @ Sigma1(e) vadd.i64 d27,d29,d16 vshr.u64 d25,d17,#34 vsli.64 d24,d17,#36 vadd.i64 d27,d26 vshr.u64 d26,d17,#39 vadd.i64 d28,d15 vsli.64 d25,d17,#30 veor d30,d17,d18 vsli.64 d26,d17,#25 veor d16,d24,d25 vadd.i64 d27,d28 vbsl d30,d19,d18 @ Maj(a,b,c) veor d16,d26 @ Sigma0(a) vadd.i64 d20,d27 vadd.i64 d30,d27 @ vadd.i64 d16,d30 bne .L16_79_neon vadd.i64 d16,d30 @ h+=Maj from the past vldmia r0,{d24,d25,d26,d27,d28,d29,d30,d31} @ load context to temp vadd.i64 q8,q12 @ vectorized accumulate vadd.i64 q9,q13 vadd.i64 q10,q14 vadd.i64 q11,q15 vstmia r0,{d16,d17,d18,d19,d20,d21,d22,d23} @ save context teq r1,r2 sub r3,#640 @ rewind K512 bne .Loop_neon VFP_ABI_POP bx lr @ .word 0xe12fff1e .size sha512_block_data_order_neon,.-sha512_block_data_order_neon #endif .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
6,952
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/fipsmodule/ghashv8-armx.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__>=7 .text .fpu neon .code 32 #undef __thumb2__ .globl gcm_init_v8 .hidden gcm_init_v8 .type gcm_init_v8,%function .align 4 gcm_init_v8: AARCH64_VALID_CALL_TARGET vld1.64 {q9},[r1] @ load input H vmov.i8 q11,#0xe1 vshl.i64 q11,q11,#57 @ 0xc2.0 vext.8 q3,q9,q9,#8 vshr.u64 q10,q11,#63 vdup.32 q9,d18[1] vext.8 q8,q10,q11,#8 @ t0=0xc2....01 vshr.u64 q10,q3,#63 vshr.s32 q9,q9,#31 @ broadcast carry bit vand q10,q10,q8 vshl.i64 q3,q3,#1 vext.8 q10,q10,q10,#8 vand q8,q8,q9 vorr q3,q3,q10 @ H<<<=1 veor q12,q3,q8 @ twisted H vext.8 q12, q12, q12, #8 vst1.64 {q12},[r0]! @ store Htable[0] @ calculate H^2 vext.8 q8,q12,q12,#8 @ Karatsuba pre-processing .byte 0xa9,0x0e,0xa9,0xf2 @ pmull2 q0,q12,q12 veor q8,q8,q12 .byte 0xa8,0x4e,0xa8,0xf2 @ pmull q2,q12,q12 .byte 0xa0,0x2e,0xa0,0xf2 @ pmull q1,q8,q8 vext.8 q9,q0,q2,#8 @ Karatsuba post-processing veor q10,q0,q2 veor q1,q1,q9 veor q1,q1,q10 .byte 0x26,0x4e,0xe0,0xf2 @ pmull q10,q0,q11 @ 1st phase vmov d4,d3 @ Xh|Xm - 256-bit result vmov d3,d0 @ Xm is rotated Xl veor q0,q1,q10 vext.8 q10,q0,q0,#8 @ 2nd phase .byte 0x26,0x0e,0xa0,0xf2 @ pmull q0,q0,q11 veor q10,q10,q2 veor q9,q0,q10 vext.8 q14,q9,q9,#8 @ Karatsuba pre-processing veor q9,q9,q14 vext.8 q13,q8,q9,#8 @ pack Karatsuba pre-processed vst1.64 {q13},[r0]! @ store Htable[1..2] vst1.64 {q14},[r0]! @ store Htable[1..2] bx lr .size gcm_init_v8,.-gcm_init_v8 .globl gcm_gmult_v8 .hidden gcm_gmult_v8 .type gcm_gmult_v8,%function .align 4 gcm_gmult_v8: AARCH64_VALID_CALL_TARGET vld1.64 {q9},[r0] @ load Xi vmov.i8 q11,#0xe1 vld1.64 {q12,q13},[r1] @ load twisted H, ... vext.8 q12,q12,q12,#8 vshl.u64 q11,q11,#57 #ifndef __ARMEB__ vrev64.8 q9,q9 #endif vext.8 q3,q9,q9,#8 .byte 0x86,0x0e,0xa8,0xf2 @ pmull q0,q12,q3 @ H.lo·Xi.lo veor q9,q9,q3 @ Karatsuba pre-processing .byte 0x87,0x4e,0xa9,0xf2 @ pmull2 q2,q12,q3 @ H.hi·Xi.hi .byte 0xa2,0x2e,0xaa,0xf2 @ pmull q1,q13,q9 @ (H.lo+H.hi)·(Xi.lo+Xi.hi) vext.8 q9,q0,q2,#8 @ Karatsuba post-processing veor q10,q0,q2 veor q1,q1,q9 veor q1,q1,q10 .byte 0x26,0x4e,0xe0,0xf2 @ pmull q10,q0,q11 @ 1st phase of reduction vmov d4,d3 @ Xh|Xm - 256-bit result vmov d3,d0 @ Xm is rotated Xl veor q0,q1,q10 vext.8 q10,q0,q0,#8 @ 2nd phase of reduction .byte 0x26,0x0e,0xa0,0xf2 @ pmull q0,q0,q11 veor q10,q10,q2 veor q0,q0,q10 #ifndef __ARMEB__ vrev64.8 q0,q0 #endif vext.8 q0,q0,q0,#8 vst1.64 {q0},[r0] @ write out Xi bx lr .size gcm_gmult_v8,.-gcm_gmult_v8 .globl gcm_ghash_v8 .hidden gcm_ghash_v8 .type gcm_ghash_v8,%function .align 4 gcm_ghash_v8: AARCH64_VALID_CALL_TARGET vstmdb sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ 32-bit ABI says so vld1.64 {q0},[r0] @ load [rotated] Xi @ "[rotated]" means that @ loaded value would have @ to be rotated in order to @ make it appear as in @ algorithm specification subs r3,r3,#32 @ see if r3 is 32 or larger mov r12,#16 @ r12 is used as post- @ increment for input pointer; @ as loop is modulo-scheduled @ r12 is zeroed just in time @ to preclude overstepping @ inp[len], which means that @ last block[s] are actually @ loaded twice, but last @ copy is not processed vld1.64 {q12,q13},[r1]! @ load twisted H, ..., H^2 vext.8 q12,q12,q12,#8 vmov.i8 q11,#0xe1 vld1.64 {q14},[r1] vext.8 q14,q14,q14,#8 moveq r12,#0 @ is it time to zero r12? vext.8 q0,q0,q0,#8 @ rotate Xi vld1.64 {q8},[r2]! 
@ load [rotated] I[0] vshl.u64 q11,q11,#57 @ compose 0xc2.0 constant #ifndef __ARMEB__ vrev64.8 q8,q8 vrev64.8 q0,q0 #endif vext.8 q3,q8,q8,#8 @ rotate I[0] blo .Lodd_tail_v8 @ r3 was less than 32 vld1.64 {q9},[r2],r12 @ load [rotated] I[1] #ifndef __ARMEB__ vrev64.8 q9,q9 #endif vext.8 q7,q9,q9,#8 veor q3,q3,q0 @ I[i]^=Xi .byte 0x8e,0x8e,0xa8,0xf2 @ pmull q4,q12,q7 @ H·Ii+1 veor q9,q9,q7 @ Karatsuba pre-processing .byte 0x8f,0xce,0xa9,0xf2 @ pmull2 q6,q12,q7 b .Loop_mod2x_v8 .align 4 .Loop_mod2x_v8: vext.8 q10,q3,q3,#8 subs r3,r3,#32 @ is there more data? .byte 0x86,0x0e,0xac,0xf2 @ pmull q0,q14,q3 @ H^2.lo·Xi.lo movlo r12,#0 @ is it time to zero r12? .byte 0xa2,0xae,0xaa,0xf2 @ pmull q5,q13,q9 veor q10,q10,q3 @ Karatsuba pre-processing .byte 0x87,0x4e,0xad,0xf2 @ pmull2 q2,q14,q3 @ H^2.hi·Xi.hi veor q0,q0,q4 @ accumulate .byte 0xa5,0x2e,0xab,0xf2 @ pmull2 q1,q13,q10 @ (H^2.lo+H^2.hi)·(Xi.lo+Xi.hi) vld1.64 {q8},[r2],r12 @ load [rotated] I[i+2] veor q2,q2,q6 moveq r12,#0 @ is it time to zero r12? veor q1,q1,q5 vext.8 q9,q0,q2,#8 @ Karatsuba post-processing veor q10,q0,q2 veor q1,q1,q9 vld1.64 {q9},[r2],r12 @ load [rotated] I[i+3] #ifndef __ARMEB__ vrev64.8 q8,q8 #endif veor q1,q1,q10 .byte 0x26,0x4e,0xe0,0xf2 @ pmull q10,q0,q11 @ 1st phase of reduction #ifndef __ARMEB__ vrev64.8 q9,q9 #endif vmov d4,d3 @ Xh|Xm - 256-bit result vmov d3,d0 @ Xm is rotated Xl vext.8 q7,q9,q9,#8 vext.8 q3,q8,q8,#8 veor q0,q1,q10 .byte 0x8e,0x8e,0xa8,0xf2 @ pmull q4,q12,q7 @ H·Ii+1 veor q3,q3,q2 @ accumulate q3 early vext.8 q10,q0,q0,#8 @ 2nd phase of reduction .byte 0x26,0x0e,0xa0,0xf2 @ pmull q0,q0,q11 veor q3,q3,q10 veor q9,q9,q7 @ Karatsuba pre-processing veor q3,q3,q0 .byte 0x8f,0xce,0xa9,0xf2 @ pmull2 q6,q12,q7 bhs .Loop_mod2x_v8 @ there was at least 32 more bytes veor q2,q2,q10 vext.8 q3,q8,q8,#8 @ re-construct q3 adds r3,r3,#32 @ re-construct r3 veor q0,q0,q2 @ re-construct q0 beq .Ldone_v8 @ is r3 zero? .Lodd_tail_v8: vext.8 q10,q0,q0,#8 veor q3,q3,q0 @ inp^=Xi veor q9,q8,q10 @ q9 is rotated inp^Xi .byte 0x86,0x0e,0xa8,0xf2 @ pmull q0,q12,q3 @ H.lo·Xi.lo veor q9,q9,q3 @ Karatsuba pre-processing .byte 0x87,0x4e,0xa9,0xf2 @ pmull2 q2,q12,q3 @ H.hi·Xi.hi .byte 0xa2,0x2e,0xaa,0xf2 @ pmull q1,q13,q9 @ (H.lo+H.hi)·(Xi.lo+Xi.hi) vext.8 q9,q0,q2,#8 @ Karatsuba post-processing veor q10,q0,q2 veor q1,q1,q9 veor q1,q1,q10 .byte 0x26,0x4e,0xe0,0xf2 @ pmull q10,q0,q11 @ 1st phase of reduction vmov d4,d3 @ Xh|Xm - 256-bit result vmov d3,d0 @ Xm is rotated Xl veor q0,q1,q10 vext.8 q10,q0,q0,#8 @ 2nd phase of reduction .byte 0x26,0x0e,0xa0,0xf2 @ pmull q0,q0,q11 veor q10,q10,q2 veor q0,q0,q10 .Ldone_v8: #ifndef __ARMEB__ vrev64.8 q0,q0 #endif vext.8 q0,q0,q0,#8 vst1.64 {q0},[r0] @ write out Xi vldmia sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ 32-bit ABI says so bx lr .size gcm_ghash_v8,.-gcm_ghash_v8 .byte 71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
6,298
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/fipsmodule/ghash-armv4.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__) #include <openssl/arm_arch.h> @ Silence ARMv8 deprecated IT instruction warnings. This file is used by both @ ARMv7 and ARMv8 processors and does not use ARMv8 instructions. (ARMv8 PMULL @ instructions are in aesv8-armx.pl.) .arch armv7-a .text #if defined(__thumb2__) || defined(__clang__) .syntax unified #define ldrplb ldrbpl #define ldrneb ldrbne #endif #if defined(__thumb2__) .thumb #else .code 32 #endif #if __ARM_MAX_ARCH__>=7 .arch armv7-a .fpu neon .globl gcm_init_neon .hidden gcm_init_neon .type gcm_init_neon,%function .align 4 gcm_init_neon: vld1.64 d7,[r1]! @ load H vmov.i8 q8,#0xe1 vld1.64 d6,[r1] vshl.i64 d17,#57 vshr.u64 d16,#63 @ t0=0xc2....01 vdup.8 q9,d7[7] vshr.u64 d26,d6,#63 vshr.s8 q9,#7 @ broadcast carry bit vshl.i64 q3,q3,#1 vand q8,q8,q9 vorr d7,d26 @ H<<<=1 veor q3,q3,q8 @ twisted H vstmia r0,{q3} bx lr @ bx lr .size gcm_init_neon,.-gcm_init_neon .globl gcm_gmult_neon .hidden gcm_gmult_neon .type gcm_gmult_neon,%function .align 4 gcm_gmult_neon: vld1.64 d7,[r0]! @ load Xi vld1.64 d6,[r0]! vmov.i64 d29,#0x0000ffffffffffff vldmia r1,{d26,d27} @ load twisted H vmov.i64 d30,#0x00000000ffffffff #ifdef __ARMEL__ vrev64.8 q3,q3 #endif vmov.i64 d31,#0x000000000000ffff veor d28,d26,d27 @ Karatsuba pre-processing mov r3,#16 b .Lgmult_neon .size gcm_gmult_neon,.-gcm_gmult_neon .globl gcm_ghash_neon .hidden gcm_ghash_neon .type gcm_ghash_neon,%function .align 4 gcm_ghash_neon: vld1.64 d1,[r0]! @ load Xi vld1.64 d0,[r0]! vmov.i64 d29,#0x0000ffffffffffff vldmia r1,{d26,d27} @ load twisted H vmov.i64 d30,#0x00000000ffffffff #ifdef __ARMEL__ vrev64.8 q0,q0 #endif vmov.i64 d31,#0x000000000000ffff veor d28,d26,d27 @ Karatsuba pre-processing .Loop_neon: vld1.64 d7,[r2]! @ load inp vld1.64 d6,[r2]! 
#ifdef __ARMEL__ vrev64.8 q3,q3 #endif veor q3,q0 @ inp^=Xi .Lgmult_neon: vext.8 d16, d26, d26, #1 @ A1 vmull.p8 q8, d16, d6 @ F = A1*B vext.8 d0, d6, d6, #1 @ B1 vmull.p8 q0, d26, d0 @ E = A*B1 vext.8 d18, d26, d26, #2 @ A2 vmull.p8 q9, d18, d6 @ H = A2*B vext.8 d22, d6, d6, #2 @ B2 vmull.p8 q11, d26, d22 @ G = A*B2 vext.8 d20, d26, d26, #3 @ A3 veor q8, q8, q0 @ L = E + F vmull.p8 q10, d20, d6 @ J = A3*B vext.8 d0, d6, d6, #3 @ B3 veor q9, q9, q11 @ M = G + H vmull.p8 q0, d26, d0 @ I = A*B3 veor d16, d16, d17 @ t0 = (L) (P0 + P1) << 8 vand d17, d17, d29 vext.8 d22, d6, d6, #4 @ B4 veor d18, d18, d19 @ t1 = (M) (P2 + P3) << 16 vand d19, d19, d30 vmull.p8 q11, d26, d22 @ K = A*B4 veor q10, q10, q0 @ N = I + J veor d16, d16, d17 veor d18, d18, d19 veor d20, d20, d21 @ t2 = (N) (P4 + P5) << 24 vand d21, d21, d31 vext.8 q8, q8, q8, #15 veor d22, d22, d23 @ t3 = (K) (P6 + P7) << 32 vmov.i64 d23, #0 vext.8 q9, q9, q9, #14 veor d20, d20, d21 vmull.p8 q0, d26, d6 @ D = A*B vext.8 q11, q11, q11, #12 vext.8 q10, q10, q10, #13 veor q8, q8, q9 veor q10, q10, q11 veor q0, q0, q8 veor q0, q0, q10 veor d6,d6,d7 @ Karatsuba pre-processing vext.8 d16, d28, d28, #1 @ A1 vmull.p8 q8, d16, d6 @ F = A1*B vext.8 d2, d6, d6, #1 @ B1 vmull.p8 q1, d28, d2 @ E = A*B1 vext.8 d18, d28, d28, #2 @ A2 vmull.p8 q9, d18, d6 @ H = A2*B vext.8 d22, d6, d6, #2 @ B2 vmull.p8 q11, d28, d22 @ G = A*B2 vext.8 d20, d28, d28, #3 @ A3 veor q8, q8, q1 @ L = E + F vmull.p8 q10, d20, d6 @ J = A3*B vext.8 d2, d6, d6, #3 @ B3 veor q9, q9, q11 @ M = G + H vmull.p8 q1, d28, d2 @ I = A*B3 veor d16, d16, d17 @ t0 = (L) (P0 + P1) << 8 vand d17, d17, d29 vext.8 d22, d6, d6, #4 @ B4 veor d18, d18, d19 @ t1 = (M) (P2 + P3) << 16 vand d19, d19, d30 vmull.p8 q11, d28, d22 @ K = A*B4 veor q10, q10, q1 @ N = I + J veor d16, d16, d17 veor d18, d18, d19 veor d20, d20, d21 @ t2 = (N) (P4 + P5) << 24 vand d21, d21, d31 vext.8 q8, q8, q8, #15 veor d22, d22, d23 @ t3 = (K) (P6 + P7) << 32 vmov.i64 d23, #0 vext.8 q9, q9, q9, #14 veor d20, d20, d21 vmull.p8 q1, d28, d6 @ D = A*B vext.8 q11, q11, q11, #12 vext.8 q10, q10, q10, #13 veor q8, q8, q9 veor q10, q10, q11 veor q1, q1, q8 veor q1, q1, q10 vext.8 d16, d27, d27, #1 @ A1 vmull.p8 q8, d16, d7 @ F = A1*B vext.8 d4, d7, d7, #1 @ B1 vmull.p8 q2, d27, d4 @ E = A*B1 vext.8 d18, d27, d27, #2 @ A2 vmull.p8 q9, d18, d7 @ H = A2*B vext.8 d22, d7, d7, #2 @ B2 vmull.p8 q11, d27, d22 @ G = A*B2 vext.8 d20, d27, d27, #3 @ A3 veor q8, q8, q2 @ L = E + F vmull.p8 q10, d20, d7 @ J = A3*B vext.8 d4, d7, d7, #3 @ B3 veor q9, q9, q11 @ M = G + H vmull.p8 q2, d27, d4 @ I = A*B3 veor d16, d16, d17 @ t0 = (L) (P0 + P1) << 8 vand d17, d17, d29 vext.8 d22, d7, d7, #4 @ B4 veor d18, d18, d19 @ t1 = (M) (P2 + P3) << 16 vand d19, d19, d30 vmull.p8 q11, d27, d22 @ K = A*B4 veor q10, q10, q2 @ N = I + J veor d16, d16, d17 veor d18, d18, d19 veor d20, d20, d21 @ t2 = (N) (P4 + P5) << 24 vand d21, d21, d31 vext.8 q8, q8, q8, #15 veor d22, d22, d23 @ t3 = (K) (P6 + P7) << 32 vmov.i64 d23, #0 vext.8 q9, q9, q9, #14 veor d20, d20, d21 vmull.p8 q2, d27, d7 @ D = A*B vext.8 q11, q11, q11, #12 vext.8 q10, q10, q10, #13 veor q8, q8, q9 veor q10, q10, q11 veor q2, q2, q8 veor q2, q2, q10 veor q1,q1,q0 @ Karatsuba post-processing veor q1,q1,q2 veor d1,d1,d2 veor d4,d4,d3 @ Xh|Xl - 256-bit result @ equivalent of reduction_avx from ghash-x86_64.pl vshl.i64 q9,q0,#57 @ 1st phase vshl.i64 q10,q0,#62 veor q10,q10,q9 @ vshl.i64 q9,q0,#63 veor q10, q10, q9 @ veor d1,d1,d20 @ veor d4,d4,d21 vshr.u64 q10,q0,#1 @ 2nd phase veor q2,q2,q0 veor 
q0,q0,q10 @ vshr.u64 q10,q10,#6 vshr.u64 q0,q0,#1 @ veor q0,q0,q2 @ veor q0,q0,q10 @ subs r3,#16 bne .Loop_neon #ifdef __ARMEL__ vrev64.8 q0,q0 #endif sub r0,#16 vst1.64 d1,[r0]! @ write out Xi vst1.64 d0,[r0] bx lr @ bx lr .size gcm_ghash_neon,.-gcm_ghash_neon #endif .byte 71,72,65,83,72,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
31,713
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/fipsmodule/sha1-armv4-large.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__) #include <openssl/arm_arch.h> .text #if defined(__thumb2__) .syntax unified .thumb #else .code 32 #endif .globl sha1_block_data_order_nohw .hidden sha1_block_data_order_nohw .type sha1_block_data_order_nohw,%function .align 5 sha1_block_data_order_nohw: stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 ldmia r0,{r3,r4,r5,r6,r7} .Lloop: ldr r8,.LK_00_19 mov r14,sp sub sp,sp,#15*4 mov r5,r5,ror#30 mov r6,r6,ror#30 mov r7,r7,ror#30 @ [6] .L_00_15: #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r7,r8,r7,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r5,r6 @ F_xx_xx orr r9,r9,r11,lsl#16 add r7,r7,r3,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r7,r8,r7,ror#2 @ E+=K_00_19 eor r10,r5,r6 @ F_xx_xx add r7,r7,r3,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r4,r10,ror#2 add r7,r7,r9 @ E+=X[i] eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! add r7,r7,r10 @ E+=F_00_19(B,C,D) #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r6,r8,r6,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r4,r5 @ F_xx_xx orr r9,r9,r11,lsl#16 add r6,r6,r7,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r6,r8,r6,ror#2 @ E+=K_00_19 eor r10,r4,r5 @ F_xx_xx add r6,r6,r7,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r3,r10,ror#2 add r6,r6,r9 @ E+=X[i] eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! add r6,r6,r10 @ E+=F_00_19(B,C,D) #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r5,r8,r5,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r3,r4 @ F_xx_xx orr r9,r9,r11,lsl#16 add r5,r5,r6,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r5,r8,r5,ror#2 @ E+=K_00_19 eor r10,r3,r4 @ F_xx_xx add r5,r5,r6,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r7,r10,ror#2 add r5,r5,r9 @ E+=X[i] eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! add r5,r5,r10 @ E+=F_00_19(B,C,D) #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r4,r8,r4,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r7,r3 @ F_xx_xx orr r9,r9,r11,lsl#16 add r4,r4,r5,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r4,r8,r4,ror#2 @ E+=K_00_19 eor r10,r7,r3 @ F_xx_xx add r4,r4,r5,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r6,r10,ror#2 add r4,r4,r9 @ E+=X[i] eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! add r4,r4,r10 @ E+=F_00_19(B,C,D) #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r3,r8,r3,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r6,r7 @ F_xx_xx orr r9,r9,r11,lsl#16 add r3,r3,r4,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r3,r8,r3,ror#2 @ E+=K_00_19 eor r10,r6,r7 @ F_xx_xx add r3,r3,r4,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r5,r10,ror#2 add r3,r3,r9 @ E+=X[i] eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! 
add r3,r3,r10 @ E+=F_00_19(B,C,D) #if defined(__thumb2__) mov r12,sp teq r14,r12 #else teq r14,sp #endif bne .L_00_15 @ [((11+4)*5+2)*3] sub sp,sp,#25*4 #if __ARM_ARCH<7 ldrb r10,[r1,#2] ldrb r9,[r1,#3] ldrb r11,[r1,#1] add r7,r8,r7,ror#2 @ E+=K_00_19 ldrb r12,[r1],#4 orr r9,r9,r10,lsl#8 eor r10,r5,r6 @ F_xx_xx orr r9,r9,r11,lsl#16 add r7,r7,r3,ror#27 @ E+=ROR(A,27) orr r9,r9,r12,lsl#24 #else ldr r9,[r1],#4 @ handles unaligned add r7,r8,r7,ror#2 @ E+=K_00_19 eor r10,r5,r6 @ F_xx_xx add r7,r7,r3,ror#27 @ E+=ROR(A,27) #ifdef __ARMEL__ rev r9,r9 @ byte swap #endif #endif and r10,r4,r10,ror#2 add r7,r7,r9 @ E+=X[i] eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) str r9,[r14,#-4]! add r7,r7,r10 @ E+=F_00_19(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r6,r8,r6,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r4,r5 @ F_xx_xx mov r9,r9,ror#31 add r6,r6,r7,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r3,r10,ror#2 @ F_xx_xx @ F_xx_xx add r6,r6,r9 @ E+=X[i] eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) add r6,r6,r10 @ E+=F_00_19(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r5,r8,r5,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r3,r4 @ F_xx_xx mov r9,r9,ror#31 add r5,r5,r6,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r7,r10,ror#2 @ F_xx_xx @ F_xx_xx add r5,r5,r9 @ E+=X[i] eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) add r5,r5,r10 @ E+=F_00_19(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r4,r8,r4,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r7,r3 @ F_xx_xx mov r9,r9,ror#31 add r4,r4,r5,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r6,r10,ror#2 @ F_xx_xx @ F_xx_xx add r4,r4,r9 @ E+=X[i] eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) add r4,r4,r10 @ E+=F_00_19(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r3,r8,r3,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r6,r7 @ F_xx_xx mov r9,r9,ror#31 add r3,r3,r4,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r5,r10,ror#2 @ F_xx_xx @ F_xx_xx add r3,r3,r9 @ E+=X[i] eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) add r3,r3,r10 @ E+=F_00_19(B,C,D) ldr r8,.LK_20_39 @ [+15+16*4] cmn sp,#0 @ [+3], clear carry to denote 20_39 .L_20_39_or_60_79: ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r7,r8,r7,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r5,r6 @ F_xx_xx mov r9,r9,ror#31 add r7,r7,r3,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! eor r10,r4,r10,ror#2 @ F_xx_xx @ F_xx_xx add r7,r7,r9 @ E+=X[i] add r7,r7,r10 @ E+=F_20_39(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r6,r8,r6,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r4,r5 @ F_xx_xx mov r9,r9,ror#31 add r6,r6,r7,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! eor r10,r3,r10,ror#2 @ F_xx_xx @ F_xx_xx add r6,r6,r9 @ E+=X[i] add r6,r6,r10 @ E+=F_20_39(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r5,r8,r5,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r3,r4 @ F_xx_xx mov r9,r9,ror#31 add r5,r5,r6,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! 
eor r10,r7,r10,ror#2 @ F_xx_xx @ F_xx_xx add r5,r5,r9 @ E+=X[i] add r5,r5,r10 @ E+=F_20_39(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r4,r8,r4,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r7,r3 @ F_xx_xx mov r9,r9,ror#31 add r4,r4,r5,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! eor r10,r6,r10,ror#2 @ F_xx_xx @ F_xx_xx add r4,r4,r9 @ E+=X[i] add r4,r4,r10 @ E+=F_20_39(B,C,D) ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r3,r8,r3,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r6,r7 @ F_xx_xx mov r9,r9,ror#31 add r3,r3,r4,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! eor r10,r5,r10,ror#2 @ F_xx_xx @ F_xx_xx add r3,r3,r9 @ E+=X[i] add r3,r3,r10 @ E+=F_20_39(B,C,D) #if defined(__thumb2__) mov r12,sp teq r14,r12 #else teq r14,sp @ preserve carry #endif bne .L_20_39_or_60_79 @ [+((12+3)*5+2)*4] bcs .L_done @ [+((12+3)*5+2)*4], spare 300 bytes ldr r8,.LK_40_59 sub sp,sp,#20*4 @ [+2] .L_40_59: ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r7,r8,r7,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r5,r6 @ F_xx_xx mov r9,r9,ror#31 add r7,r7,r3,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r4,r10,ror#2 @ F_xx_xx and r11,r5,r6 @ F_xx_xx add r7,r7,r9 @ E+=X[i] add r7,r7,r10 @ E+=F_40_59(B,C,D) add r7,r7,r11,ror#2 ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r6,r8,r6,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r4,r5 @ F_xx_xx mov r9,r9,ror#31 add r6,r6,r7,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r3,r10,ror#2 @ F_xx_xx and r11,r4,r5 @ F_xx_xx add r6,r6,r9 @ E+=X[i] add r6,r6,r10 @ E+=F_40_59(B,C,D) add r6,r6,r11,ror#2 ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r5,r8,r5,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r3,r4 @ F_xx_xx mov r9,r9,ror#31 add r5,r5,r6,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r7,r10,ror#2 @ F_xx_xx and r11,r3,r4 @ F_xx_xx add r5,r5,r9 @ E+=X[i] add r5,r5,r10 @ E+=F_40_59(B,C,D) add r5,r5,r11,ror#2 ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r4,r8,r4,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r7,r3 @ F_xx_xx mov r9,r9,ror#31 add r4,r4,r5,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! and r10,r6,r10,ror#2 @ F_xx_xx and r11,r7,r3 @ F_xx_xx add r4,r4,r9 @ E+=X[i] add r4,r4,r10 @ E+=F_40_59(B,C,D) add r4,r4,r11,ror#2 ldr r9,[r14,#15*4] ldr r10,[r14,#13*4] ldr r11,[r14,#7*4] add r3,r8,r3,ror#2 @ E+=K_xx_xx ldr r12,[r14,#2*4] eor r9,r9,r10 eor r11,r11,r12 @ 1 cycle stall eor r10,r6,r7 @ F_xx_xx mov r9,r9,ror#31 add r3,r3,r4,ror#27 @ E+=ROR(A,27) eor r9,r9,r11,ror#31 str r9,[r14,#-4]! 
and r10,r5,r10,ror#2 @ F_xx_xx and r11,r6,r7 @ F_xx_xx add r3,r3,r9 @ E+=X[i] add r3,r3,r10 @ E+=F_40_59(B,C,D) add r3,r3,r11,ror#2 #if defined(__thumb2__) mov r12,sp teq r14,r12 #else teq r14,sp #endif bne .L_40_59 @ [+((12+5)*5+2)*4] ldr r8,.LK_60_79 sub sp,sp,#20*4 cmp sp,#0 @ set carry to denote 60_79 b .L_20_39_or_60_79 @ [+4], spare 300 bytes .L_done: add sp,sp,#80*4 @ "deallocate" stack frame ldmia r0,{r8,r9,r10,r11,r12} add r3,r8,r3 add r4,r9,r4 add r5,r10,r5,ror#2 add r6,r11,r6,ror#2 add r7,r12,r7,ror#2 stmia r0,{r3,r4,r5,r6,r7} teq r1,r2 bne .Lloop @ [+18], total 1307 #if __ARM_ARCH>=5 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} #else ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} tst lr,#1 moveq pc,lr @ be binary compatible with V4, yet .word 0xe12fff1e @ interoperable with Thumb ISA:-) #endif .size sha1_block_data_order_nohw,.-sha1_block_data_order_nohw .align 5 .LK_00_19:.word 0x5a827999 .LK_20_39:.word 0x6ed9eba1 .LK_40_59:.word 0x8f1bbcdc .LK_60_79:.word 0xca62c1d6 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,47,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 5 #if __ARM_MAX_ARCH__>=7 .arch armv7-a .fpu neon .globl sha1_block_data_order_neon .hidden sha1_block_data_order_neon .type sha1_block_data_order_neon,%function .align 4 sha1_block_data_order_neon: stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 @ dmb @ errata #451034 on early Cortex A8 @ vstmdb sp!,{d8-d15} @ ABI specification says so mov r14,sp sub r12,sp,#64 adr r8,.LK_00_19 bic r12,r12,#15 @ align for 128-bit stores ldmia r0,{r3,r4,r5,r6,r7} @ load context mov sp,r12 @ alloca vld1.8 {q0,q1},[r1]! @ handles unaligned veor q15,q15,q15 vld1.8 {q2,q3},[r1]! vld1.32 {d28[],d29[]},[r8,:32]! @ load K_00_19 vrev32.8 q0,q0 @ yes, even on vrev32.8 q1,q1 @ big-endian... vrev32.8 q2,q2 vadd.i32 q8,q0,q14 vrev32.8 q3,q3 vadd.i32 q9,q1,q14 vst1.32 {q8},[r12,:128]! vadd.i32 q10,q2,q14 vst1.32 {q9},[r12,:128]! vst1.32 {q10},[r12,:128]! ldr r9,[sp] @ big RAW stall .Loop_neon: vext.8 q8,q0,q1,#8 bic r10,r6,r4 add r7,r7,r9 and r11,r5,r4 vadd.i32 q13,q3,q14 ldr r9,[sp,#4] add r7,r7,r3,ror#27 vext.8 q12,q3,q15,#4 eor r11,r11,r10 mov r4,r4,ror#2 add r7,r7,r11 veor q8,q8,q0 bic r10,r5,r3 add r6,r6,r9 veor q12,q12,q2 and r11,r4,r3 ldr r9,[sp,#8] veor q12,q12,q8 add r6,r6,r7,ror#27 eor r11,r11,r10 vst1.32 {q13},[r12,:128]! sub r12,r12,#64 mov r3,r3,ror#2 add r6,r6,r11 vext.8 q13,q15,q12,#4 bic r10,r4,r7 add r5,r5,r9 vadd.i32 q8,q12,q12 and r11,r3,r7 ldr r9,[sp,#12] vsri.32 q8,q12,#31 add r5,r5,r6,ror#27 eor r11,r11,r10 mov r7,r7,ror#2 vshr.u32 q12,q13,#30 add r5,r5,r11 bic r10,r3,r6 vshl.u32 q13,q13,#2 add r4,r4,r9 and r11,r7,r6 veor q8,q8,q12 ldr r9,[sp,#16] add r4,r4,r5,ror#27 veor q8,q8,q13 eor r11,r11,r10 mov r6,r6,ror#2 add r4,r4,r11 vext.8 q9,q1,q2,#8 bic r10,r7,r5 add r3,r3,r9 and r11,r6,r5 vadd.i32 q13,q8,q14 ldr r9,[sp,#20] vld1.32 {d28[],d29[]},[r8,:32]! add r3,r3,r4,ror#27 vext.8 q12,q8,q15,#4 eor r11,r11,r10 mov r5,r5,ror#2 add r3,r3,r11 veor q9,q9,q1 bic r10,r6,r4 add r7,r7,r9 veor q12,q12,q3 and r11,r5,r4 ldr r9,[sp,#24] veor q12,q12,q9 add r7,r7,r3,ror#27 eor r11,r11,r10 vst1.32 {q13},[r12,:128]! 
mov r4,r4,ror#2 add r7,r7,r11 vext.8 q13,q15,q12,#4 bic r10,r5,r3 add r6,r6,r9 vadd.i32 q9,q12,q12 and r11,r4,r3 ldr r9,[sp,#28] vsri.32 q9,q12,#31 add r6,r6,r7,ror#27 eor r11,r11,r10 mov r3,r3,ror#2 vshr.u32 q12,q13,#30 add r6,r6,r11 bic r10,r4,r7 vshl.u32 q13,q13,#2 add r5,r5,r9 and r11,r3,r7 veor q9,q9,q12 ldr r9,[sp,#32] add r5,r5,r6,ror#27 veor q9,q9,q13 eor r11,r11,r10 mov r7,r7,ror#2 add r5,r5,r11 vext.8 q10,q2,q3,#8 bic r10,r3,r6 add r4,r4,r9 and r11,r7,r6 vadd.i32 q13,q9,q14 ldr r9,[sp,#36] add r4,r4,r5,ror#27 vext.8 q12,q9,q15,#4 eor r11,r11,r10 mov r6,r6,ror#2 add r4,r4,r11 veor q10,q10,q2 bic r10,r7,r5 add r3,r3,r9 veor q12,q12,q8 and r11,r6,r5 ldr r9,[sp,#40] veor q12,q12,q10 add r3,r3,r4,ror#27 eor r11,r11,r10 vst1.32 {q13},[r12,:128]! mov r5,r5,ror#2 add r3,r3,r11 vext.8 q13,q15,q12,#4 bic r10,r6,r4 add r7,r7,r9 vadd.i32 q10,q12,q12 and r11,r5,r4 ldr r9,[sp,#44] vsri.32 q10,q12,#31 add r7,r7,r3,ror#27 eor r11,r11,r10 mov r4,r4,ror#2 vshr.u32 q12,q13,#30 add r7,r7,r11 bic r10,r5,r3 vshl.u32 q13,q13,#2 add r6,r6,r9 and r11,r4,r3 veor q10,q10,q12 ldr r9,[sp,#48] add r6,r6,r7,ror#27 veor q10,q10,q13 eor r11,r11,r10 mov r3,r3,ror#2 add r6,r6,r11 vext.8 q11,q3,q8,#8 bic r10,r4,r7 add r5,r5,r9 and r11,r3,r7 vadd.i32 q13,q10,q14 ldr r9,[sp,#52] add r5,r5,r6,ror#27 vext.8 q12,q10,q15,#4 eor r11,r11,r10 mov r7,r7,ror#2 add r5,r5,r11 veor q11,q11,q3 bic r10,r3,r6 add r4,r4,r9 veor q12,q12,q9 and r11,r7,r6 ldr r9,[sp,#56] veor q12,q12,q11 add r4,r4,r5,ror#27 eor r11,r11,r10 vst1.32 {q13},[r12,:128]! mov r6,r6,ror#2 add r4,r4,r11 vext.8 q13,q15,q12,#4 bic r10,r7,r5 add r3,r3,r9 vadd.i32 q11,q12,q12 and r11,r6,r5 ldr r9,[sp,#60] vsri.32 q11,q12,#31 add r3,r3,r4,ror#27 eor r11,r11,r10 mov r5,r5,ror#2 vshr.u32 q12,q13,#30 add r3,r3,r11 bic r10,r6,r4 vshl.u32 q13,q13,#2 add r7,r7,r9 and r11,r5,r4 veor q11,q11,q12 ldr r9,[sp,#0] add r7,r7,r3,ror#27 veor q11,q11,q13 eor r11,r11,r10 mov r4,r4,ror#2 add r7,r7,r11 vext.8 q12,q10,q11,#8 bic r10,r5,r3 add r6,r6,r9 and r11,r4,r3 veor q0,q0,q8 ldr r9,[sp,#4] add r6,r6,r7,ror#27 veor q0,q0,q1 eor r11,r11,r10 mov r3,r3,ror#2 vadd.i32 q13,q11,q14 add r6,r6,r11 bic r10,r4,r7 veor q12,q12,q0 add r5,r5,r9 and r11,r3,r7 vshr.u32 q0,q12,#30 ldr r9,[sp,#8] add r5,r5,r6,ror#27 vst1.32 {q13},[r12,:128]! sub r12,r12,#64 eor r11,r11,r10 mov r7,r7,ror#2 vsli.32 q0,q12,#2 add r5,r5,r11 bic r10,r3,r6 add r4,r4,r9 and r11,r7,r6 ldr r9,[sp,#12] add r4,r4,r5,ror#27 eor r11,r11,r10 mov r6,r6,ror#2 add r4,r4,r11 bic r10,r7,r5 add r3,r3,r9 and r11,r6,r5 ldr r9,[sp,#16] add r3,r3,r4,ror#27 eor r11,r11,r10 mov r5,r5,ror#2 add r3,r3,r11 vext.8 q12,q11,q0,#8 eor r10,r4,r6 add r7,r7,r9 ldr r9,[sp,#20] veor q1,q1,q9 eor r11,r10,r5 add r7,r7,r3,ror#27 veor q1,q1,q2 mov r4,r4,ror#2 add r7,r7,r11 vadd.i32 q13,q0,q14 eor r10,r3,r5 add r6,r6,r9 veor q12,q12,q1 ldr r9,[sp,#24] eor r11,r10,r4 vshr.u32 q1,q12,#30 add r6,r6,r7,ror#27 mov r3,r3,ror#2 vst1.32 {q13},[r12,:128]! add r6,r6,r11 eor r10,r7,r4 vsli.32 q1,q12,#2 add r5,r5,r9 ldr r9,[sp,#28] eor r11,r10,r3 add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 eor r10,r6,r3 add r4,r4,r9 ldr r9,[sp,#32] eor r11,r10,r7 add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 vext.8 q12,q0,q1,#8 eor r10,r5,r7 add r3,r3,r9 ldr r9,[sp,#36] veor q2,q2,q10 eor r11,r10,r6 add r3,r3,r4,ror#27 veor q2,q2,q3 mov r5,r5,ror#2 add r3,r3,r11 vadd.i32 q13,q1,q14 eor r10,r4,r6 vld1.32 {d28[],d29[]},[r8,:32]! add r7,r7,r9 veor q12,q12,q2 ldr r9,[sp,#40] eor r11,r10,r5 vshr.u32 q2,q12,#30 add r7,r7,r3,ror#27 mov r4,r4,ror#2 vst1.32 {q13},[r12,:128]! 
add r7,r7,r11 eor r10,r3,r5 vsli.32 q2,q12,#2 add r6,r6,r9 ldr r9,[sp,#44] eor r11,r10,r4 add r6,r6,r7,ror#27 mov r3,r3,ror#2 add r6,r6,r11 eor r10,r7,r4 add r5,r5,r9 ldr r9,[sp,#48] eor r11,r10,r3 add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 vext.8 q12,q1,q2,#8 eor r10,r6,r3 add r4,r4,r9 ldr r9,[sp,#52] veor q3,q3,q11 eor r11,r10,r7 add r4,r4,r5,ror#27 veor q3,q3,q8 mov r6,r6,ror#2 add r4,r4,r11 vadd.i32 q13,q2,q14 eor r10,r5,r7 add r3,r3,r9 veor q12,q12,q3 ldr r9,[sp,#56] eor r11,r10,r6 vshr.u32 q3,q12,#30 add r3,r3,r4,ror#27 mov r5,r5,ror#2 vst1.32 {q13},[r12,:128]! add r3,r3,r11 eor r10,r4,r6 vsli.32 q3,q12,#2 add r7,r7,r9 ldr r9,[sp,#60] eor r11,r10,r5 add r7,r7,r3,ror#27 mov r4,r4,ror#2 add r7,r7,r11 eor r10,r3,r5 add r6,r6,r9 ldr r9,[sp,#0] eor r11,r10,r4 add r6,r6,r7,ror#27 mov r3,r3,ror#2 add r6,r6,r11 vext.8 q12,q2,q3,#8 eor r10,r7,r4 add r5,r5,r9 ldr r9,[sp,#4] veor q8,q8,q0 eor r11,r10,r3 add r5,r5,r6,ror#27 veor q8,q8,q9 mov r7,r7,ror#2 add r5,r5,r11 vadd.i32 q13,q3,q14 eor r10,r6,r3 add r4,r4,r9 veor q12,q12,q8 ldr r9,[sp,#8] eor r11,r10,r7 vshr.u32 q8,q12,#30 add r4,r4,r5,ror#27 mov r6,r6,ror#2 vst1.32 {q13},[r12,:128]! sub r12,r12,#64 add r4,r4,r11 eor r10,r5,r7 vsli.32 q8,q12,#2 add r3,r3,r9 ldr r9,[sp,#12] eor r11,r10,r6 add r3,r3,r4,ror#27 mov r5,r5,ror#2 add r3,r3,r11 eor r10,r4,r6 add r7,r7,r9 ldr r9,[sp,#16] eor r11,r10,r5 add r7,r7,r3,ror#27 mov r4,r4,ror#2 add r7,r7,r11 vext.8 q12,q3,q8,#8 eor r10,r3,r5 add r6,r6,r9 ldr r9,[sp,#20] veor q9,q9,q1 eor r11,r10,r4 add r6,r6,r7,ror#27 veor q9,q9,q10 mov r3,r3,ror#2 add r6,r6,r11 vadd.i32 q13,q8,q14 eor r10,r7,r4 add r5,r5,r9 veor q12,q12,q9 ldr r9,[sp,#24] eor r11,r10,r3 vshr.u32 q9,q12,#30 add r5,r5,r6,ror#27 mov r7,r7,ror#2 vst1.32 {q13},[r12,:128]! add r5,r5,r11 eor r10,r6,r3 vsli.32 q9,q12,#2 add r4,r4,r9 ldr r9,[sp,#28] eor r11,r10,r7 add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 eor r10,r5,r7 add r3,r3,r9 ldr r9,[sp,#32] eor r11,r10,r6 add r3,r3,r4,ror#27 mov r5,r5,ror#2 add r3,r3,r11 vext.8 q12,q8,q9,#8 add r7,r7,r9 and r10,r5,r6 ldr r9,[sp,#36] veor q10,q10,q2 add r7,r7,r3,ror#27 eor r11,r5,r6 veor q10,q10,q11 add r7,r7,r10 and r11,r11,r4 vadd.i32 q13,q9,q14 mov r4,r4,ror#2 add r7,r7,r11 veor q12,q12,q10 add r6,r6,r9 and r10,r4,r5 vshr.u32 q10,q12,#30 ldr r9,[sp,#40] add r6,r6,r7,ror#27 vst1.32 {q13},[r12,:128]! eor r11,r4,r5 add r6,r6,r10 vsli.32 q10,q12,#2 and r11,r11,r3 mov r3,r3,ror#2 add r6,r6,r11 add r5,r5,r9 and r10,r3,r4 ldr r9,[sp,#44] add r5,r5,r6,ror#27 eor r11,r3,r4 add r5,r5,r10 and r11,r11,r7 mov r7,r7,ror#2 add r5,r5,r11 add r4,r4,r9 and r10,r7,r3 ldr r9,[sp,#48] add r4,r4,r5,ror#27 eor r11,r7,r3 add r4,r4,r10 and r11,r11,r6 mov r6,r6,ror#2 add r4,r4,r11 vext.8 q12,q9,q10,#8 add r3,r3,r9 and r10,r6,r7 ldr r9,[sp,#52] veor q11,q11,q3 add r3,r3,r4,ror#27 eor r11,r6,r7 veor q11,q11,q0 add r3,r3,r10 and r11,r11,r5 vadd.i32 q13,q10,q14 mov r5,r5,ror#2 vld1.32 {d28[],d29[]},[r8,:32]! add r3,r3,r11 veor q12,q12,q11 add r7,r7,r9 and r10,r5,r6 vshr.u32 q11,q12,#30 ldr r9,[sp,#56] add r7,r7,r3,ror#27 vst1.32 {q13},[r12,:128]! 
eor r11,r5,r6 add r7,r7,r10 vsli.32 q11,q12,#2 and r11,r11,r4 mov r4,r4,ror#2 add r7,r7,r11 add r6,r6,r9 and r10,r4,r5 ldr r9,[sp,#60] add r6,r6,r7,ror#27 eor r11,r4,r5 add r6,r6,r10 and r11,r11,r3 mov r3,r3,ror#2 add r6,r6,r11 add r5,r5,r9 and r10,r3,r4 ldr r9,[sp,#0] add r5,r5,r6,ror#27 eor r11,r3,r4 add r5,r5,r10 and r11,r11,r7 mov r7,r7,ror#2 add r5,r5,r11 vext.8 q12,q10,q11,#8 add r4,r4,r9 and r10,r7,r3 ldr r9,[sp,#4] veor q0,q0,q8 add r4,r4,r5,ror#27 eor r11,r7,r3 veor q0,q0,q1 add r4,r4,r10 and r11,r11,r6 vadd.i32 q13,q11,q14 mov r6,r6,ror#2 add r4,r4,r11 veor q12,q12,q0 add r3,r3,r9 and r10,r6,r7 vshr.u32 q0,q12,#30 ldr r9,[sp,#8] add r3,r3,r4,ror#27 vst1.32 {q13},[r12,:128]! sub r12,r12,#64 eor r11,r6,r7 add r3,r3,r10 vsli.32 q0,q12,#2 and r11,r11,r5 mov r5,r5,ror#2 add r3,r3,r11 add r7,r7,r9 and r10,r5,r6 ldr r9,[sp,#12] add r7,r7,r3,ror#27 eor r11,r5,r6 add r7,r7,r10 and r11,r11,r4 mov r4,r4,ror#2 add r7,r7,r11 add r6,r6,r9 and r10,r4,r5 ldr r9,[sp,#16] add r6,r6,r7,ror#27 eor r11,r4,r5 add r6,r6,r10 and r11,r11,r3 mov r3,r3,ror#2 add r6,r6,r11 vext.8 q12,q11,q0,#8 add r5,r5,r9 and r10,r3,r4 ldr r9,[sp,#20] veor q1,q1,q9 add r5,r5,r6,ror#27 eor r11,r3,r4 veor q1,q1,q2 add r5,r5,r10 and r11,r11,r7 vadd.i32 q13,q0,q14 mov r7,r7,ror#2 add r5,r5,r11 veor q12,q12,q1 add r4,r4,r9 and r10,r7,r3 vshr.u32 q1,q12,#30 ldr r9,[sp,#24] add r4,r4,r5,ror#27 vst1.32 {q13},[r12,:128]! eor r11,r7,r3 add r4,r4,r10 vsli.32 q1,q12,#2 and r11,r11,r6 mov r6,r6,ror#2 add r4,r4,r11 add r3,r3,r9 and r10,r6,r7 ldr r9,[sp,#28] add r3,r3,r4,ror#27 eor r11,r6,r7 add r3,r3,r10 and r11,r11,r5 mov r5,r5,ror#2 add r3,r3,r11 add r7,r7,r9 and r10,r5,r6 ldr r9,[sp,#32] add r7,r7,r3,ror#27 eor r11,r5,r6 add r7,r7,r10 and r11,r11,r4 mov r4,r4,ror#2 add r7,r7,r11 vext.8 q12,q0,q1,#8 add r6,r6,r9 and r10,r4,r5 ldr r9,[sp,#36] veor q2,q2,q10 add r6,r6,r7,ror#27 eor r11,r4,r5 veor q2,q2,q3 add r6,r6,r10 and r11,r11,r3 vadd.i32 q13,q1,q14 mov r3,r3,ror#2 add r6,r6,r11 veor q12,q12,q2 add r5,r5,r9 and r10,r3,r4 vshr.u32 q2,q12,#30 ldr r9,[sp,#40] add r5,r5,r6,ror#27 vst1.32 {q13},[r12,:128]! eor r11,r3,r4 add r5,r5,r10 vsli.32 q2,q12,#2 and r11,r11,r7 mov r7,r7,ror#2 add r5,r5,r11 add r4,r4,r9 and r10,r7,r3 ldr r9,[sp,#44] add r4,r4,r5,ror#27 eor r11,r7,r3 add r4,r4,r10 and r11,r11,r6 mov r6,r6,ror#2 add r4,r4,r11 add r3,r3,r9 and r10,r6,r7 ldr r9,[sp,#48] add r3,r3,r4,ror#27 eor r11,r6,r7 add r3,r3,r10 and r11,r11,r5 mov r5,r5,ror#2 add r3,r3,r11 vext.8 q12,q1,q2,#8 eor r10,r4,r6 add r7,r7,r9 ldr r9,[sp,#52] veor q3,q3,q11 eor r11,r10,r5 add r7,r7,r3,ror#27 veor q3,q3,q8 mov r4,r4,ror#2 add r7,r7,r11 vadd.i32 q13,q2,q14 eor r10,r3,r5 add r6,r6,r9 veor q12,q12,q3 ldr r9,[sp,#56] eor r11,r10,r4 vshr.u32 q3,q12,#30 add r6,r6,r7,ror#27 mov r3,r3,ror#2 vst1.32 {q13},[r12,:128]! add r6,r6,r11 eor r10,r7,r4 vsli.32 q3,q12,#2 add r5,r5,r9 ldr r9,[sp,#60] eor r11,r10,r3 add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 eor r10,r6,r3 add r4,r4,r9 ldr r9,[sp,#0] eor r11,r10,r7 add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 vadd.i32 q13,q3,q14 eor r10,r5,r7 add r3,r3,r9 vst1.32 {q13},[r12,:128]! sub r12,r12,#64 teq r1,r2 sub r8,r8,#16 it eq subeq r1,r1,#64 vld1.8 {q0,q1},[r1]! ldr r9,[sp,#4] eor r11,r10,r6 vld1.8 {q2,q3},[r1]! add r3,r3,r4,ror#27 mov r5,r5,ror#2 vld1.32 {d28[],d29[]},[r8,:32]! 
add r3,r3,r11 eor r10,r4,r6 vrev32.8 q0,q0 add r7,r7,r9 ldr r9,[sp,#8] eor r11,r10,r5 add r7,r7,r3,ror#27 mov r4,r4,ror#2 add r7,r7,r11 eor r10,r3,r5 add r6,r6,r9 ldr r9,[sp,#12] eor r11,r10,r4 add r6,r6,r7,ror#27 mov r3,r3,ror#2 add r6,r6,r11 eor r10,r7,r4 add r5,r5,r9 ldr r9,[sp,#16] eor r11,r10,r3 add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 vrev32.8 q1,q1 eor r10,r6,r3 add r4,r4,r9 vadd.i32 q8,q0,q14 ldr r9,[sp,#20] eor r11,r10,r7 vst1.32 {q8},[r12,:128]! add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 eor r10,r5,r7 add r3,r3,r9 ldr r9,[sp,#24] eor r11,r10,r6 add r3,r3,r4,ror#27 mov r5,r5,ror#2 add r3,r3,r11 eor r10,r4,r6 add r7,r7,r9 ldr r9,[sp,#28] eor r11,r10,r5 add r7,r7,r3,ror#27 mov r4,r4,ror#2 add r7,r7,r11 eor r10,r3,r5 add r6,r6,r9 ldr r9,[sp,#32] eor r11,r10,r4 add r6,r6,r7,ror#27 mov r3,r3,ror#2 add r6,r6,r11 vrev32.8 q2,q2 eor r10,r7,r4 add r5,r5,r9 vadd.i32 q9,q1,q14 ldr r9,[sp,#36] eor r11,r10,r3 vst1.32 {q9},[r12,:128]! add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 eor r10,r6,r3 add r4,r4,r9 ldr r9,[sp,#40] eor r11,r10,r7 add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 eor r10,r5,r7 add r3,r3,r9 ldr r9,[sp,#44] eor r11,r10,r6 add r3,r3,r4,ror#27 mov r5,r5,ror#2 add r3,r3,r11 eor r10,r4,r6 add r7,r7,r9 ldr r9,[sp,#48] eor r11,r10,r5 add r7,r7,r3,ror#27 mov r4,r4,ror#2 add r7,r7,r11 vrev32.8 q3,q3 eor r10,r3,r5 add r6,r6,r9 vadd.i32 q10,q2,q14 ldr r9,[sp,#52] eor r11,r10,r4 vst1.32 {q10},[r12,:128]! add r6,r6,r7,ror#27 mov r3,r3,ror#2 add r6,r6,r11 eor r10,r7,r4 add r5,r5,r9 ldr r9,[sp,#56] eor r11,r10,r3 add r5,r5,r6,ror#27 mov r7,r7,ror#2 add r5,r5,r11 eor r10,r6,r3 add r4,r4,r9 ldr r9,[sp,#60] eor r11,r10,r7 add r4,r4,r5,ror#27 mov r6,r6,ror#2 add r4,r4,r11 eor r10,r5,r7 add r3,r3,r9 eor r11,r10,r6 add r3,r3,r4,ror#27 mov r5,r5,ror#2 add r3,r3,r11 ldmia r0,{r9,r10,r11,r12} @ accumulate context add r3,r3,r9 ldr r9,[r0,#16] add r4,r4,r10 add r5,r5,r11 add r6,r6,r12 it eq moveq sp,r14 add r7,r7,r9 it ne ldrne r9,[sp] stmia r0,{r3,r4,r5,r6,r7} itt ne addne r12,sp,#3*16 bne .Loop_neon @ vldmia sp!,{d8-d15} ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} .size sha1_block_data_order_neon,.-sha1_block_data_order_neon #endif #if __ARM_MAX_ARCH__>=7 # if defined(__thumb2__) # define INST(a,b,c,d) .byte c,d|0xf,a,b # else # define INST(a,b,c,d) .byte a,b,c,d|0x10 # endif .globl sha1_block_data_order_hw .hidden sha1_block_data_order_hw .type sha1_block_data_order_hw,%function .align 5 sha1_block_data_order_hw: vstmdb sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ ABI specification says so veor q1,q1,q1 adr r3,.LK_00_19 vld1.32 {q0},[r0]! vld1.32 {d2[0]},[r0] sub r0,r0,#16 vld1.32 {d16[],d17[]},[r3,:32]! vld1.32 {d18[],d19[]},[r3,:32]! vld1.32 {d20[],d21[]},[r3,:32]! vld1.32 {d22[],d23[]},[r3,:32] .Loop_v8: vld1.8 {q4,q5},[r1]! vld1.8 {q6,q7},[r1]! 
vrev32.8 q4,q4 vrev32.8 q5,q5 vadd.i32 q12,q8,q4 vrev32.8 q6,q6 vmov q14,q0 @ offload subs r2,r2,#1 vadd.i32 q13,q8,q5 vrev32.8 q7,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 0 INST(0x68,0x0c,0x02,0xe2) @ sha1c q0,q1,q12 vadd.i32 q12,q8,q6 INST(0x4c,0x8c,0x3a,0xe2) @ sha1su0 q4,q5,q6 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 1 INST(0x6a,0x0c,0x06,0xe2) @ sha1c q0,q3,q13 vadd.i32 q13,q8,q7 INST(0x8e,0x83,0xba,0xf3) @ sha1su1 q4,q7 INST(0x4e,0xac,0x3c,0xe2) @ sha1su0 q5,q6,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 2 INST(0x68,0x0c,0x04,0xe2) @ sha1c q0,q2,q12 vadd.i32 q12,q8,q4 INST(0x88,0xa3,0xba,0xf3) @ sha1su1 q5,q4 INST(0x48,0xcc,0x3e,0xe2) @ sha1su0 q6,q7,q4 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 3 INST(0x6a,0x0c,0x06,0xe2) @ sha1c q0,q3,q13 vadd.i32 q13,q9,q5 INST(0x8a,0xc3,0xba,0xf3) @ sha1su1 q6,q5 INST(0x4a,0xec,0x38,0xe2) @ sha1su0 q7,q4,q5 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 4 INST(0x68,0x0c,0x04,0xe2) @ sha1c q0,q2,q12 vadd.i32 q12,q9,q6 INST(0x8c,0xe3,0xba,0xf3) @ sha1su1 q7,q6 INST(0x4c,0x8c,0x3a,0xe2) @ sha1su0 q4,q5,q6 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 5 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q13,q9,q7 INST(0x8e,0x83,0xba,0xf3) @ sha1su1 q4,q7 INST(0x4e,0xac,0x3c,0xe2) @ sha1su0 q5,q6,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 6 INST(0x68,0x0c,0x14,0xe2) @ sha1p q0,q2,q12 vadd.i32 q12,q9,q4 INST(0x88,0xa3,0xba,0xf3) @ sha1su1 q5,q4 INST(0x48,0xcc,0x3e,0xe2) @ sha1su0 q6,q7,q4 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 7 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q13,q9,q5 INST(0x8a,0xc3,0xba,0xf3) @ sha1su1 q6,q5 INST(0x4a,0xec,0x38,0xe2) @ sha1su0 q7,q4,q5 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 8 INST(0x68,0x0c,0x14,0xe2) @ sha1p q0,q2,q12 vadd.i32 q12,q10,q6 INST(0x8c,0xe3,0xba,0xf3) @ sha1su1 q7,q6 INST(0x4c,0x8c,0x3a,0xe2) @ sha1su0 q4,q5,q6 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 9 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q13,q10,q7 INST(0x8e,0x83,0xba,0xf3) @ sha1su1 q4,q7 INST(0x4e,0xac,0x3c,0xe2) @ sha1su0 q5,q6,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 10 INST(0x68,0x0c,0x24,0xe2) @ sha1m q0,q2,q12 vadd.i32 q12,q10,q4 INST(0x88,0xa3,0xba,0xf3) @ sha1su1 q5,q4 INST(0x48,0xcc,0x3e,0xe2) @ sha1su0 q6,q7,q4 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 11 INST(0x6a,0x0c,0x26,0xe2) @ sha1m q0,q3,q13 vadd.i32 q13,q10,q5 INST(0x8a,0xc3,0xba,0xf3) @ sha1su1 q6,q5 INST(0x4a,0xec,0x38,0xe2) @ sha1su0 q7,q4,q5 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 12 INST(0x68,0x0c,0x24,0xe2) @ sha1m q0,q2,q12 vadd.i32 q12,q10,q6 INST(0x8c,0xe3,0xba,0xf3) @ sha1su1 q7,q6 INST(0x4c,0x8c,0x3a,0xe2) @ sha1su0 q4,q5,q6 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 13 INST(0x6a,0x0c,0x26,0xe2) @ sha1m q0,q3,q13 vadd.i32 q13,q11,q7 INST(0x8e,0x83,0xba,0xf3) @ sha1su1 q4,q7 INST(0x4e,0xac,0x3c,0xe2) @ sha1su0 q5,q6,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 14 INST(0x68,0x0c,0x24,0xe2) @ sha1m q0,q2,q12 vadd.i32 q12,q11,q4 INST(0x88,0xa3,0xba,0xf3) @ sha1su1 q5,q4 INST(0x48,0xcc,0x3e,0xe2) @ sha1su0 q6,q7,q4 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 15 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q13,q11,q5 INST(0x8a,0xc3,0xba,0xf3) @ sha1su1 q6,q5 INST(0x4a,0xec,0x38,0xe2) @ sha1su0 q7,q4,q5 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 16 INST(0x68,0x0c,0x14,0xe2) @ sha1p q0,q2,q12 vadd.i32 q12,q11,q6 INST(0x8c,0xe3,0xba,0xf3) @ sha1su1 q7,q6 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 17 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q13,q11,q7 INST(0xc0,0x62,0xb9,0xf3) @ sha1h q3,q0 @ 18 INST(0x68,0x0c,0x14,0xe2) @ sha1p 
q0,q2,q12 INST(0xc0,0x42,0xb9,0xf3) @ sha1h q2,q0 @ 19 INST(0x6a,0x0c,0x16,0xe2) @ sha1p q0,q3,q13 vadd.i32 q1,q1,q2 vadd.i32 q0,q0,q14 bne .Loop_v8 vst1.32 {q0},[r0]! vst1.32 {d2[0]},[r0] vldmia sp!,{d8,d9,d10,d11,d12,d13,d14,d15} bx lr @ bx lr .size sha1_block_data_order_hw,.-sha1_block_data_order_hw #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
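The sha1_block_data_order_* variants above (integer-only, NEON, and the manually-encoded sha1c/sha1p/sha1m hardware path) all unroll the same SHA-1 compression function: the .L_00_15, .L_20_39_or_60_79 and .L_40_59 blocks correspond to the round-dependent boolean functions F_00_19, F_20_39/F_60_79 and F_40_59, and .LK_00_19 .. .LK_60_79 hold the round constants. As a reading aid only (not part of the generated file), a minimal portable C sketch of the per-block logic the assembly unrolls could look like this; rotl32, sha1_f and sha1_block are hypothetical names chosen for illustration:

    #include <stdint.h>

    static inline uint32_t rotl32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

    /* Round-dependent boolean function: Ch, Parity, Maj, Parity. */
    static uint32_t sha1_f(int t, uint32_t b, uint32_t c, uint32_t d) {
        if (t < 20) return (b & c) | (~b & d);
        if (t < 40) return b ^ c ^ d;
        if (t < 60) return (b & c) | (b & d) | (c & d);
        return b ^ c ^ d;
    }

    /* Same values as .LK_00_19 .. .LK_60_79 above. */
    static const uint32_t K[4] = {0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6};

    /* Compress one 64-byte block into the five-word state h[]. W[] plays the
     * role of the message schedule the assembly keeps on the stack. */
    static void sha1_block(uint32_t h[5], const uint8_t p[64]) {
        uint32_t W[80], a = h[0], b = h[1], c = h[2], d = h[3], e = h[4];
        for (int t = 0; t < 16; t++)
            W[t] = (uint32_t)p[4*t] << 24 | (uint32_t)p[4*t+1] << 16 |
                   (uint32_t)p[4*t+2] << 8 | (uint32_t)p[4*t+3];
        for (int t = 16; t < 80; t++)
            W[t] = rotl32(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16], 1);
        for (int t = 0; t < 80; t++) {
            uint32_t tmp = rotl32(a, 5) + sha1_f(t, b, c, d) + e + K[t/20] + W[t];
            e = d; d = c; c = rotl32(b, 30); b = a; a = tmp;
        }
        h[0] += a; h[1] += b; h[2] += c; h[3] += d; h[4] += e;
    }

One deliberate difference from this sketch: the assembly keeps B, C and D pre-rotated by two bits (the early mov r5,r5,ror#30 .. mov r7,r7,ror#30 and the ror#2 operands throughout), folding the rotl32(b, 30) step into later uses instead of performing it as a separate instruction.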
marvin-hansen/iggy-streaming-system
28,809
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-arm/crypto/chacha/chacha-armv4.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_ARM) && defined(__ELF__) #include <openssl/arm_arch.h> @ Silence ARMv8 deprecated IT instruction warnings. This file is used by both @ ARMv7 and ARMv8 processors and does not use ARMv8 instructions. .arch armv7-a .text #if defined(__thumb2__) || defined(__clang__) .syntax unified #endif #if defined(__thumb2__) .thumb #else .code 32 #endif #if defined(__thumb2__) || defined(__clang__) #define ldrhsb ldrbhs #endif .align 5 .Lsigma: .long 0x61707865,0x3320646e,0x79622d32,0x6b206574 @ endian-neutral .Lone: .long 1,0,0,0 .globl ChaCha20_ctr32_nohw .hidden ChaCha20_ctr32_nohw .type ChaCha20_ctr32_nohw,%function .align 5 ChaCha20_ctr32_nohw: ldr r12,[sp,#0] @ pull pointer to counter and nonce stmdb sp!,{r0,r1,r2,r4-r11,lr} adr r14,.Lsigma ldmia r12,{r4,r5,r6,r7} @ load counter and nonce sub sp,sp,#4*(16) @ off-load area stmdb sp!,{r4,r5,r6,r7} @ copy counter and nonce ldmia r3,{r4,r5,r6,r7,r8,r9,r10,r11} @ load key ldmia r14,{r0,r1,r2,r3} @ load sigma stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11} @ copy key stmdb sp!,{r0,r1,r2,r3} @ copy sigma str r10,[sp,#4*(16+10)] @ off-load "rx" str r11,[sp,#4*(16+11)] @ off-load "rx" b .Loop_outer_enter .align 4 .Loop_outer: ldmia sp,{r0,r1,r2,r3,r4,r5,r6,r7,r8,r9} @ load key material str r11,[sp,#4*(32+2)] @ save len str r12, [sp,#4*(32+1)] @ save inp str r14, [sp,#4*(32+0)] @ save out .Loop_outer_enter: ldr r11, [sp,#4*(15)] ldr r12,[sp,#4*(12)] @ modulo-scheduled load ldr r10, [sp,#4*(13)] ldr r14,[sp,#4*(14)] str r11, [sp,#4*(16+15)] mov r11,#10 b .Loop .align 4 .Loop: subs r11,r11,#1 add r0,r0,r4 mov r12,r12,ror#16 add r1,r1,r5 mov r10,r10,ror#16 eor r12,r12,r0,ror#16 eor r10,r10,r1,ror#16 add r8,r8,r12 mov r4,r4,ror#20 add r9,r9,r10 mov r5,r5,ror#20 eor r4,r4,r8,ror#20 eor r5,r5,r9,ror#20 add r0,r0,r4 mov r12,r12,ror#24 add r1,r1,r5 mov r10,r10,ror#24 eor r12,r12,r0,ror#24 eor r10,r10,r1,ror#24 add r8,r8,r12 mov r4,r4,ror#25 add r9,r9,r10 mov r5,r5,ror#25 str r10,[sp,#4*(16+13)] ldr r10,[sp,#4*(16+15)] eor r4,r4,r8,ror#25 eor r5,r5,r9,ror#25 str r8,[sp,#4*(16+8)] ldr r8,[sp,#4*(16+10)] add r2,r2,r6 mov r14,r14,ror#16 str r9,[sp,#4*(16+9)] ldr r9,[sp,#4*(16+11)] add r3,r3,r7 mov r10,r10,ror#16 eor r14,r14,r2,ror#16 eor r10,r10,r3,ror#16 add r8,r8,r14 mov r6,r6,ror#20 add r9,r9,r10 mov r7,r7,ror#20 eor r6,r6,r8,ror#20 eor r7,r7,r9,ror#20 add r2,r2,r6 mov r14,r14,ror#24 add r3,r3,r7 mov r10,r10,ror#24 eor r14,r14,r2,ror#24 eor r10,r10,r3,ror#24 add r8,r8,r14 mov r6,r6,ror#25 add r9,r9,r10 mov r7,r7,ror#25 eor r6,r6,r8,ror#25 eor r7,r7,r9,ror#25 add r0,r0,r5 mov r10,r10,ror#16 add r1,r1,r6 mov r12,r12,ror#16 eor r10,r10,r0,ror#16 eor r12,r12,r1,ror#16 add r8,r8,r10 mov r5,r5,ror#20 add r9,r9,r12 mov r6,r6,ror#20 eor r5,r5,r8,ror#20 eor r6,r6,r9,ror#20 add r0,r0,r5 mov r10,r10,ror#24 add r1,r1,r6 mov r12,r12,ror#24 eor r10,r10,r0,ror#24 eor r12,r12,r1,ror#24 add r8,r8,r10 mov r5,r5,ror#25 str r10,[sp,#4*(16+15)] ldr r10,[sp,#4*(16+13)] add r9,r9,r12 mov r6,r6,ror#25 eor r5,r5,r8,ror#25 eor r6,r6,r9,ror#25 str r8,[sp,#4*(16+10)] ldr r8,[sp,#4*(16+8)] add r2,r2,r7 mov r10,r10,ror#16 str r9,[sp,#4*(16+11)] ldr r9,[sp,#4*(16+9)] add r3,r3,r4 mov r14,r14,ror#16 eor r10,r10,r2,ror#16 eor r14,r14,r3,ror#16 add r8,r8,r10 mov r7,r7,ror#20 add r9,r9,r14 mov r4,r4,ror#20 eor r7,r7,r8,ror#20 eor r4,r4,r9,ror#20 add r2,r2,r7 mov r10,r10,ror#24 add r3,r3,r4 mov r14,r14,ror#24 eor 
r10,r10,r2,ror#24 eor r14,r14,r3,ror#24 add r8,r8,r10 mov r7,r7,ror#25 add r9,r9,r14 mov r4,r4,ror#25 eor r7,r7,r8,ror#25 eor r4,r4,r9,ror#25 bne .Loop ldr r11,[sp,#4*(32+2)] @ load len str r8, [sp,#4*(16+8)] @ modulo-scheduled store str r9, [sp,#4*(16+9)] str r12,[sp,#4*(16+12)] str r10, [sp,#4*(16+13)] str r14,[sp,#4*(16+14)] @ at this point we have first half of 512-bit result in @ rx and second half at sp+4*(16+8) cmp r11,#64 @ done yet? #ifdef __thumb2__ itete lo #endif addlo r12,sp,#4*(0) @ shortcut or ... ldrhs r12,[sp,#4*(32+1)] @ ... load inp addlo r14,sp,#4*(0) @ shortcut or ... ldrhs r14,[sp,#4*(32+0)] @ ... load out ldr r8,[sp,#4*(0)] @ load key material ldr r9,[sp,#4*(1)] #if __ARM_ARCH>=6 || !defined(__ARMEB__) # if __ARM_ARCH<7 orr r10,r12,r14 tst r10,#3 @ are input and output aligned? ldr r10,[sp,#4*(2)] bne .Lunaligned cmp r11,#64 @ restore flags # else ldr r10,[sp,#4*(2)] # endif ldr r11,[sp,#4*(3)] add r0,r0,r8 @ accumulate key material add r1,r1,r9 # ifdef __thumb2__ itt hs # endif ldrhs r8,[r12],#16 @ load input ldrhs r9,[r12,#-12] add r2,r2,r10 add r3,r3,r11 # ifdef __thumb2__ itt hs # endif ldrhs r10,[r12,#-8] ldrhs r11,[r12,#-4] # if __ARM_ARCH>=6 && defined(__ARMEB__) rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 # endif # ifdef __thumb2__ itt hs # endif eorhs r0,r0,r8 @ xor with input eorhs r1,r1,r9 add r8,sp,#4*(4) str r0,[r14],#16 @ store output # ifdef __thumb2__ itt hs # endif eorhs r2,r2,r10 eorhs r3,r3,r11 ldmia r8,{r8,r9,r10,r11} @ load key material str r1,[r14,#-12] str r2,[r14,#-8] str r3,[r14,#-4] add r4,r4,r8 @ accumulate key material add r5,r5,r9 # ifdef __thumb2__ itt hs # endif ldrhs r8,[r12],#16 @ load input ldrhs r9,[r12,#-12] add r6,r6,r10 add r7,r7,r11 # ifdef __thumb2__ itt hs # endif ldrhs r10,[r12,#-8] ldrhs r11,[r12,#-4] # if __ARM_ARCH>=6 && defined(__ARMEB__) rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif # ifdef __thumb2__ itt hs # endif eorhs r4,r4,r8 eorhs r5,r5,r9 add r8,sp,#4*(8) str r4,[r14],#16 @ store output # ifdef __thumb2__ itt hs # endif eorhs r6,r6,r10 eorhs r7,r7,r11 str r5,[r14,#-12] ldmia r8,{r8,r9,r10,r11} @ load key material str r6,[r14,#-8] add r0,sp,#4*(16+8) str r7,[r14,#-4] ldmia r0,{r0,r1,r2,r3,r4,r5,r6,r7} @ load second half add r0,r0,r8 @ accumulate key material add r1,r1,r9 # ifdef __thumb2__ itt hs # endif ldrhs r8,[r12],#16 @ load input ldrhs r9,[r12,#-12] # ifdef __thumb2__ itt hi # endif strhi r10,[sp,#4*(16+10)] @ copy "rx" while at it strhi r11,[sp,#4*(16+11)] @ copy "rx" while at it add r2,r2,r10 add r3,r3,r11 # ifdef __thumb2__ itt hs # endif ldrhs r10,[r12,#-8] ldrhs r11,[r12,#-4] # if __ARM_ARCH>=6 && defined(__ARMEB__) rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 # endif # ifdef __thumb2__ itt hs # endif eorhs r0,r0,r8 eorhs r1,r1,r9 add r8,sp,#4*(12) str r0,[r14],#16 @ store output # ifdef __thumb2__ itt hs # endif eorhs r2,r2,r10 eorhs r3,r3,r11 str r1,[r14,#-12] ldmia r8,{r8,r9,r10,r11} @ load key material str r2,[r14,#-8] str r3,[r14,#-4] add r4,r4,r8 @ accumulate key material add r5,r5,r9 # ifdef __thumb2__ itt hi # endif addhi r8,r8,#1 @ next counter value strhi r8,[sp,#4*(12)] @ save next counter value # ifdef __thumb2__ itt hs # endif ldrhs r8,[r12],#16 @ load input ldrhs r9,[r12,#-12] add r6,r6,r10 add r7,r7,r11 # ifdef __thumb2__ itt hs # endif ldrhs r10,[r12,#-8] ldrhs r11,[r12,#-4] # if __ARM_ARCH>=6 && defined(__ARMEB__) rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif # ifdef __thumb2__ itt hs # endif eorhs r4,r4,r8 eorhs r5,r5,r9 # ifdef __thumb2__ it ne # endif ldrne r8,[sp,#4*(32+2)] @ re-load len 
# ifdef __thumb2__ itt hs # endif eorhs r6,r6,r10 eorhs r7,r7,r11 str r4,[r14],#16 @ store output str r5,[r14,#-12] # ifdef __thumb2__ it hs # endif subhs r11,r8,#64 @ len-=64 str r6,[r14,#-8] str r7,[r14,#-4] bhi .Loop_outer beq .Ldone # if __ARM_ARCH<7 b .Ltail .align 4 .Lunaligned:@ unaligned endian-neutral path cmp r11,#64 @ restore flags # endif #endif #if __ARM_ARCH<7 ldr r11,[sp,#4*(3)] add r0,r0,r8 @ accumulate key material add r1,r1,r9 add r2,r2,r10 # ifdef __thumb2__ itete lo # endif eorlo r8,r8,r8 @ zero or ... ldrhsb r8,[r12],#16 @ ... load input eorlo r9,r9,r9 ldrhsb r9,[r12,#-12] add r3,r3,r11 # ifdef __thumb2__ itete lo # endif eorlo r10,r10,r10 ldrhsb r10,[r12,#-8] eorlo r11,r11,r11 ldrhsb r11,[r12,#-4] eor r0,r8,r0 @ xor with input (or zero) eor r1,r9,r1 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-15] @ load more input ldrhsb r9,[r12,#-11] eor r2,r10,r2 strb r0,[r14],#16 @ store output eor r3,r11,r3 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-7] ldrhsb r11,[r12,#-3] strb r1,[r14,#-12] eor r0,r8,r0,lsr#8 strb r2,[r14,#-8] eor r1,r9,r1,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-14] @ load more input ldrhsb r9,[r12,#-10] strb r3,[r14,#-4] eor r2,r10,r2,lsr#8 strb r0,[r14,#-15] eor r3,r11,r3,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-6] ldrhsb r11,[r12,#-2] strb r1,[r14,#-11] eor r0,r8,r0,lsr#8 strb r2,[r14,#-7] eor r1,r9,r1,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-13] @ load more input ldrhsb r9,[r12,#-9] strb r3,[r14,#-3] eor r2,r10,r2,lsr#8 strb r0,[r14,#-14] eor r3,r11,r3,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-5] ldrhsb r11,[r12,#-1] strb r1,[r14,#-10] strb r2,[r14,#-6] eor r0,r8,r0,lsr#8 strb r3,[r14,#-2] eor r1,r9,r1,lsr#8 strb r0,[r14,#-13] eor r2,r10,r2,lsr#8 strb r1,[r14,#-9] eor r3,r11,r3,lsr#8 strb r2,[r14,#-5] strb r3,[r14,#-1] add r8,sp,#4*(4+0) ldmia r8,{r8,r9,r10,r11} @ load key material add r0,sp,#4*(16+8) add r4,r4,r8 @ accumulate key material add r5,r5,r9 add r6,r6,r10 # ifdef __thumb2__ itete lo # endif eorlo r8,r8,r8 @ zero or ... ldrhsb r8,[r12],#16 @ ... 
load input eorlo r9,r9,r9 ldrhsb r9,[r12,#-12] add r7,r7,r11 # ifdef __thumb2__ itete lo # endif eorlo r10,r10,r10 ldrhsb r10,[r12,#-8] eorlo r11,r11,r11 ldrhsb r11,[r12,#-4] eor r4,r8,r4 @ xor with input (or zero) eor r5,r9,r5 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-15] @ load more input ldrhsb r9,[r12,#-11] eor r6,r10,r6 strb r4,[r14],#16 @ store output eor r7,r11,r7 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-7] ldrhsb r11,[r12,#-3] strb r5,[r14,#-12] eor r4,r8,r4,lsr#8 strb r6,[r14,#-8] eor r5,r9,r5,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-14] @ load more input ldrhsb r9,[r12,#-10] strb r7,[r14,#-4] eor r6,r10,r6,lsr#8 strb r4,[r14,#-15] eor r7,r11,r7,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-6] ldrhsb r11,[r12,#-2] strb r5,[r14,#-11] eor r4,r8,r4,lsr#8 strb r6,[r14,#-7] eor r5,r9,r5,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-13] @ load more input ldrhsb r9,[r12,#-9] strb r7,[r14,#-3] eor r6,r10,r6,lsr#8 strb r4,[r14,#-14] eor r7,r11,r7,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-5] ldrhsb r11,[r12,#-1] strb r5,[r14,#-10] strb r6,[r14,#-6] eor r4,r8,r4,lsr#8 strb r7,[r14,#-2] eor r5,r9,r5,lsr#8 strb r4,[r14,#-13] eor r6,r10,r6,lsr#8 strb r5,[r14,#-9] eor r7,r11,r7,lsr#8 strb r6,[r14,#-5] strb r7,[r14,#-1] add r8,sp,#4*(4+4) ldmia r8,{r8,r9,r10,r11} @ load key material ldmia r0,{r0,r1,r2,r3,r4,r5,r6,r7} @ load second half # ifdef __thumb2__ itt hi # endif strhi r10,[sp,#4*(16+10)] @ copy "rx" strhi r11,[sp,#4*(16+11)] @ copy "rx" add r0,r0,r8 @ accumulate key material add r1,r1,r9 add r2,r2,r10 # ifdef __thumb2__ itete lo # endif eorlo r8,r8,r8 @ zero or ... ldrhsb r8,[r12],#16 @ ... load input eorlo r9,r9,r9 ldrhsb r9,[r12,#-12] add r3,r3,r11 # ifdef __thumb2__ itete lo # endif eorlo r10,r10,r10 ldrhsb r10,[r12,#-8] eorlo r11,r11,r11 ldrhsb r11,[r12,#-4] eor r0,r8,r0 @ xor with input (or zero) eor r1,r9,r1 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-15] @ load more input ldrhsb r9,[r12,#-11] eor r2,r10,r2 strb r0,[r14],#16 @ store output eor r3,r11,r3 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-7] ldrhsb r11,[r12,#-3] strb r1,[r14,#-12] eor r0,r8,r0,lsr#8 strb r2,[r14,#-8] eor r1,r9,r1,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-14] @ load more input ldrhsb r9,[r12,#-10] strb r3,[r14,#-4] eor r2,r10,r2,lsr#8 strb r0,[r14,#-15] eor r3,r11,r3,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-6] ldrhsb r11,[r12,#-2] strb r1,[r14,#-11] eor r0,r8,r0,lsr#8 strb r2,[r14,#-7] eor r1,r9,r1,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-13] @ load more input ldrhsb r9,[r12,#-9] strb r3,[r14,#-3] eor r2,r10,r2,lsr#8 strb r0,[r14,#-14] eor r3,r11,r3,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-5] ldrhsb r11,[r12,#-1] strb r1,[r14,#-10] strb r2,[r14,#-6] eor r0,r8,r0,lsr#8 strb r3,[r14,#-2] eor r1,r9,r1,lsr#8 strb r0,[r14,#-13] eor r2,r10,r2,lsr#8 strb r1,[r14,#-9] eor r3,r11,r3,lsr#8 strb r2,[r14,#-5] strb r3,[r14,#-1] add r8,sp,#4*(4+8) ldmia r8,{r8,r9,r10,r11} @ load key material add r4,r4,r8 @ accumulate key material # ifdef __thumb2__ itt hi # endif addhi r8,r8,#1 @ next counter value strhi r8,[sp,#4*(12)] @ save next counter value add r5,r5,r9 add r6,r6,r10 # ifdef __thumb2__ itete lo # endif eorlo r8,r8,r8 @ zero or ... ldrhsb r8,[r12],#16 @ ... 
load input eorlo r9,r9,r9 ldrhsb r9,[r12,#-12] add r7,r7,r11 # ifdef __thumb2__ itete lo # endif eorlo r10,r10,r10 ldrhsb r10,[r12,#-8] eorlo r11,r11,r11 ldrhsb r11,[r12,#-4] eor r4,r8,r4 @ xor with input (or zero) eor r5,r9,r5 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-15] @ load more input ldrhsb r9,[r12,#-11] eor r6,r10,r6 strb r4,[r14],#16 @ store output eor r7,r11,r7 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-7] ldrhsb r11,[r12,#-3] strb r5,[r14,#-12] eor r4,r8,r4,lsr#8 strb r6,[r14,#-8] eor r5,r9,r5,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-14] @ load more input ldrhsb r9,[r12,#-10] strb r7,[r14,#-4] eor r6,r10,r6,lsr#8 strb r4,[r14,#-15] eor r7,r11,r7,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-6] ldrhsb r11,[r12,#-2] strb r5,[r14,#-11] eor r4,r8,r4,lsr#8 strb r6,[r14,#-7] eor r5,r9,r5,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r8,[r12,#-13] @ load more input ldrhsb r9,[r12,#-9] strb r7,[r14,#-3] eor r6,r10,r6,lsr#8 strb r4,[r14,#-14] eor r7,r11,r7,lsr#8 # ifdef __thumb2__ itt hs # endif ldrhsb r10,[r12,#-5] ldrhsb r11,[r12,#-1] strb r5,[r14,#-10] strb r6,[r14,#-6] eor r4,r8,r4,lsr#8 strb r7,[r14,#-2] eor r5,r9,r5,lsr#8 strb r4,[r14,#-13] eor r6,r10,r6,lsr#8 strb r5,[r14,#-9] eor r7,r11,r7,lsr#8 strb r6,[r14,#-5] strb r7,[r14,#-1] # ifdef __thumb2__ it ne # endif ldrne r8,[sp,#4*(32+2)] @ re-load len # ifdef __thumb2__ it hs # endif subhs r11,r8,#64 @ len-=64 bhi .Loop_outer beq .Ldone #endif .Ltail: ldr r12,[sp,#4*(32+1)] @ load inp add r9,sp,#4*(0) ldr r14,[sp,#4*(32+0)] @ load out .Loop_tail: ldrb r10,[r9],#1 @ read buffer on stack ldrb r11,[r12],#1 @ read input subs r8,r8,#1 eor r11,r11,r10 strb r11,[r14],#1 @ store output bne .Loop_tail .Ldone: add sp,sp,#4*(32+3) ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,pc} .size ChaCha20_ctr32_nohw,.-ChaCha20_ctr32_nohw #if __ARM_MAX_ARCH__>=7 .arch armv7-a .fpu neon .globl ChaCha20_ctr32_neon .hidden ChaCha20_ctr32_neon .type ChaCha20_ctr32_neon,%function .align 5 ChaCha20_ctr32_neon: ldr r12,[sp,#0] @ pull pointer to counter and nonce stmdb sp!,{r0,r1,r2,r4-r11,lr} adr r14,.Lsigma vstmdb sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ ABI spec says so stmdb sp!,{r0,r1,r2,r3} vld1.32 {q1,q2},[r3] @ load key ldmia r3,{r4,r5,r6,r7,r8,r9,r10,r11} @ load key sub sp,sp,#4*(16+16) vld1.32 {q3},[r12] @ load counter and nonce add r12,sp,#4*8 ldmia r14,{r0,r1,r2,r3} @ load sigma vld1.32 {q0},[r14]! 
@ load sigma vld1.32 {q12},[r14] @ one vst1.32 {q2,q3},[r12] @ copy 1/2key|counter|nonce vst1.32 {q0,q1},[sp] @ copy sigma|1/2key str r10,[sp,#4*(16+10)] @ off-load "rx" str r11,[sp,#4*(16+11)] @ off-load "rx" vshl.i32 d26,d24,#1 @ two vstr d24,[sp,#4*(16+0)] vshl.i32 d28,d24,#2 @ four vstr d26,[sp,#4*(16+2)] vmov q4,q0 vstr d28,[sp,#4*(16+4)] vmov q8,q0 vmov q5,q1 vmov q9,q1 b .Loop_neon_enter .align 4 .Loop_neon_outer: ldmia sp,{r0,r1,r2,r3,r4,r5,r6,r7,r8,r9} @ load key material cmp r11,#64*2 @ if len<=64*2 bls .Lbreak_neon @ switch to integer-only vmov q4,q0 str r11,[sp,#4*(32+2)] @ save len vmov q8,q0 str r12, [sp,#4*(32+1)] @ save inp vmov q5,q1 str r14, [sp,#4*(32+0)] @ save out vmov q9,q1 .Loop_neon_enter: ldr r11, [sp,#4*(15)] vadd.i32 q7,q3,q12 @ counter+1 ldr r12,[sp,#4*(12)] @ modulo-scheduled load vmov q6,q2 ldr r10, [sp,#4*(13)] vmov q10,q2 ldr r14,[sp,#4*(14)] vadd.i32 q11,q7,q12 @ counter+2 str r11, [sp,#4*(16+15)] mov r11,#10 add r12,r12,#3 @ counter+3 b .Loop_neon .align 4 .Loop_neon: subs r11,r11,#1 vadd.i32 q0,q0,q1 add r0,r0,r4 vadd.i32 q4,q4,q5 mov r12,r12,ror#16 vadd.i32 q8,q8,q9 add r1,r1,r5 veor q3,q3,q0 mov r10,r10,ror#16 veor q7,q7,q4 eor r12,r12,r0,ror#16 veor q11,q11,q8 eor r10,r10,r1,ror#16 vrev32.16 q3,q3 add r8,r8,r12 vrev32.16 q7,q7 mov r4,r4,ror#20 vrev32.16 q11,q11 add r9,r9,r10 vadd.i32 q2,q2,q3 mov r5,r5,ror#20 vadd.i32 q6,q6,q7 eor r4,r4,r8,ror#20 vadd.i32 q10,q10,q11 eor r5,r5,r9,ror#20 veor q12,q1,q2 add r0,r0,r4 veor q13,q5,q6 mov r12,r12,ror#24 veor q14,q9,q10 add r1,r1,r5 vshr.u32 q1,q12,#20 mov r10,r10,ror#24 vshr.u32 q5,q13,#20 eor r12,r12,r0,ror#24 vshr.u32 q9,q14,#20 eor r10,r10,r1,ror#24 vsli.32 q1,q12,#12 add r8,r8,r12 vsli.32 q5,q13,#12 mov r4,r4,ror#25 vsli.32 q9,q14,#12 add r9,r9,r10 vadd.i32 q0,q0,q1 mov r5,r5,ror#25 vadd.i32 q4,q4,q5 str r10,[sp,#4*(16+13)] vadd.i32 q8,q8,q9 ldr r10,[sp,#4*(16+15)] veor q12,q3,q0 eor r4,r4,r8,ror#25 veor q13,q7,q4 eor r5,r5,r9,ror#25 veor q14,q11,q8 str r8,[sp,#4*(16+8)] vshr.u32 q3,q12,#24 ldr r8,[sp,#4*(16+10)] vshr.u32 q7,q13,#24 add r2,r2,r6 vshr.u32 q11,q14,#24 mov r14,r14,ror#16 vsli.32 q3,q12,#8 str r9,[sp,#4*(16+9)] vsli.32 q7,q13,#8 ldr r9,[sp,#4*(16+11)] vsli.32 q11,q14,#8 add r3,r3,r7 vadd.i32 q2,q2,q3 mov r10,r10,ror#16 vadd.i32 q6,q6,q7 eor r14,r14,r2,ror#16 vadd.i32 q10,q10,q11 eor r10,r10,r3,ror#16 veor q12,q1,q2 add r8,r8,r14 veor q13,q5,q6 mov r6,r6,ror#20 veor q14,q9,q10 add r9,r9,r10 vshr.u32 q1,q12,#25 mov r7,r7,ror#20 vshr.u32 q5,q13,#25 eor r6,r6,r8,ror#20 vshr.u32 q9,q14,#25 eor r7,r7,r9,ror#20 vsli.32 q1,q12,#7 add r2,r2,r6 vsli.32 q5,q13,#7 mov r14,r14,ror#24 vsli.32 q9,q14,#7 add r3,r3,r7 vext.8 q2,q2,q2,#8 mov r10,r10,ror#24 vext.8 q6,q6,q6,#8 eor r14,r14,r2,ror#24 vext.8 q10,q10,q10,#8 eor r10,r10,r3,ror#24 vext.8 q1,q1,q1,#4 add r8,r8,r14 vext.8 q5,q5,q5,#4 mov r6,r6,ror#25 vext.8 q9,q9,q9,#4 add r9,r9,r10 vext.8 q3,q3,q3,#12 mov r7,r7,ror#25 vext.8 q7,q7,q7,#12 eor r6,r6,r8,ror#25 vext.8 q11,q11,q11,#12 eor r7,r7,r9,ror#25 vadd.i32 q0,q0,q1 add r0,r0,r5 vadd.i32 q4,q4,q5 mov r10,r10,ror#16 vadd.i32 q8,q8,q9 add r1,r1,r6 veor q3,q3,q0 mov r12,r12,ror#16 veor q7,q7,q4 eor r10,r10,r0,ror#16 veor q11,q11,q8 eor r12,r12,r1,ror#16 vrev32.16 q3,q3 add r8,r8,r10 vrev32.16 q7,q7 mov r5,r5,ror#20 vrev32.16 q11,q11 add r9,r9,r12 vadd.i32 q2,q2,q3 mov r6,r6,ror#20 vadd.i32 q6,q6,q7 eor r5,r5,r8,ror#20 vadd.i32 q10,q10,q11 eor r6,r6,r9,ror#20 veor q12,q1,q2 add r0,r0,r5 veor q13,q5,q6 mov r10,r10,ror#24 veor q14,q9,q10 add r1,r1,r6 vshr.u32 q1,q12,#20 mov r12,r12,ror#24 vshr.u32 
q5,q13,#20 eor r10,r10,r0,ror#24 vshr.u32 q9,q14,#20 eor r12,r12,r1,ror#24 vsli.32 q1,q12,#12 add r8,r8,r10 vsli.32 q5,q13,#12 mov r5,r5,ror#25 vsli.32 q9,q14,#12 str r10,[sp,#4*(16+15)] vadd.i32 q0,q0,q1 ldr r10,[sp,#4*(16+13)] vadd.i32 q4,q4,q5 add r9,r9,r12 vadd.i32 q8,q8,q9 mov r6,r6,ror#25 veor q12,q3,q0 eor r5,r5,r8,ror#25 veor q13,q7,q4 eor r6,r6,r9,ror#25 veor q14,q11,q8 str r8,[sp,#4*(16+10)] vshr.u32 q3,q12,#24 ldr r8,[sp,#4*(16+8)] vshr.u32 q7,q13,#24 add r2,r2,r7 vshr.u32 q11,q14,#24 mov r10,r10,ror#16 vsli.32 q3,q12,#8 str r9,[sp,#4*(16+11)] vsli.32 q7,q13,#8 ldr r9,[sp,#4*(16+9)] vsli.32 q11,q14,#8 add r3,r3,r4 vadd.i32 q2,q2,q3 mov r14,r14,ror#16 vadd.i32 q6,q6,q7 eor r10,r10,r2,ror#16 vadd.i32 q10,q10,q11 eor r14,r14,r3,ror#16 veor q12,q1,q2 add r8,r8,r10 veor q13,q5,q6 mov r7,r7,ror#20 veor q14,q9,q10 add r9,r9,r14 vshr.u32 q1,q12,#25 mov r4,r4,ror#20 vshr.u32 q5,q13,#25 eor r7,r7,r8,ror#20 vshr.u32 q9,q14,#25 eor r4,r4,r9,ror#20 vsli.32 q1,q12,#7 add r2,r2,r7 vsli.32 q5,q13,#7 mov r10,r10,ror#24 vsli.32 q9,q14,#7 add r3,r3,r4 vext.8 q2,q2,q2,#8 mov r14,r14,ror#24 vext.8 q6,q6,q6,#8 eor r10,r10,r2,ror#24 vext.8 q10,q10,q10,#8 eor r14,r14,r3,ror#24 vext.8 q1,q1,q1,#12 add r8,r8,r10 vext.8 q5,q5,q5,#12 mov r7,r7,ror#25 vext.8 q9,q9,q9,#12 add r9,r9,r14 vext.8 q3,q3,q3,#4 mov r4,r4,ror#25 vext.8 q7,q7,q7,#4 eor r7,r7,r8,ror#25 vext.8 q11,q11,q11,#4 eor r4,r4,r9,ror#25 bne .Loop_neon add r11,sp,#32 vld1.32 {q12,q13},[sp] @ load key material vld1.32 {q14,q15},[r11] ldr r11,[sp,#4*(32+2)] @ load len str r8, [sp,#4*(16+8)] @ modulo-scheduled store str r9, [sp,#4*(16+9)] str r12,[sp,#4*(16+12)] str r10, [sp,#4*(16+13)] str r14,[sp,#4*(16+14)] @ at this point we have first half of 512-bit result in @ rx and second half at sp+4*(16+8) ldr r12,[sp,#4*(32+1)] @ load inp ldr r14,[sp,#4*(32+0)] @ load out vadd.i32 q0,q0,q12 @ accumulate key material vadd.i32 q4,q4,q12 vadd.i32 q8,q8,q12 vldr d24,[sp,#4*(16+0)] @ one vadd.i32 q1,q1,q13 vadd.i32 q5,q5,q13 vadd.i32 q9,q9,q13 vldr d26,[sp,#4*(16+2)] @ two vadd.i32 q2,q2,q14 vadd.i32 q6,q6,q14 vadd.i32 q10,q10,q14 vadd.i32 d14,d14,d24 @ counter+1 vadd.i32 d22,d22,d26 @ counter+2 vadd.i32 q3,q3,q15 vadd.i32 q7,q7,q15 vadd.i32 q11,q11,q15 cmp r11,#64*4 blo .Ltail_neon vld1.8 {q12,q13},[r12]! @ load input mov r11,sp vld1.8 {q14,q15},[r12]! veor q0,q0,q12 @ xor with input veor q1,q1,q13 vld1.8 {q12,q13},[r12]! veor q2,q2,q14 veor q3,q3,q15 vld1.8 {q14,q15},[r12]! veor q4,q4,q12 vst1.8 {q0,q1},[r14]! @ store output veor q5,q5,q13 vld1.8 {q12,q13},[r12]! veor q6,q6,q14 vst1.8 {q2,q3},[r14]! veor q7,q7,q15 vld1.8 {q14,q15},[r12]! veor q8,q8,q12 vld1.32 {q0,q1},[r11]! @ load for next iteration veor d25,d25,d25 vldr d24,[sp,#4*(16+4)] @ four veor q9,q9,q13 vld1.32 {q2,q3},[r11] veor q10,q10,q14 vst1.8 {q4,q5},[r14]! veor q11,q11,q15 vst1.8 {q6,q7},[r14]! vadd.i32 d6,d6,d24 @ next counter value vldr d24,[sp,#4*(16+0)] @ one ldmia sp,{r8,r9,r10,r11} @ load key material add r0,r0,r8 @ accumulate key material ldr r8,[r12],#16 @ load input vst1.8 {q8,q9},[r14]! add r1,r1,r9 ldr r9,[r12,#-12] vst1.8 {q10,q11},[r14]! 
add r2,r2,r10 ldr r10,[r12,#-8] add r3,r3,r11 ldr r11,[r12,#-4] # ifdef __ARMEB__ rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 # endif eor r0,r0,r8 @ xor with input add r8,sp,#4*(4) eor r1,r1,r9 str r0,[r14],#16 @ store output eor r2,r2,r10 str r1,[r14,#-12] eor r3,r3,r11 ldmia r8,{r8,r9,r10,r11} @ load key material str r2,[r14,#-8] str r3,[r14,#-4] add r4,r4,r8 @ accumulate key material ldr r8,[r12],#16 @ load input add r5,r5,r9 ldr r9,[r12,#-12] add r6,r6,r10 ldr r10,[r12,#-8] add r7,r7,r11 ldr r11,[r12,#-4] # ifdef __ARMEB__ rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif eor r4,r4,r8 add r8,sp,#4*(8) eor r5,r5,r9 str r4,[r14],#16 @ store output eor r6,r6,r10 str r5,[r14,#-12] eor r7,r7,r11 ldmia r8,{r8,r9,r10,r11} @ load key material str r6,[r14,#-8] add r0,sp,#4*(16+8) str r7,[r14,#-4] ldmia r0,{r0,r1,r2,r3,r4,r5,r6,r7} @ load second half add r0,r0,r8 @ accumulate key material ldr r8,[r12],#16 @ load input add r1,r1,r9 ldr r9,[r12,#-12] # ifdef __thumb2__ it hi # endif strhi r10,[sp,#4*(16+10)] @ copy "rx" while at it add r2,r2,r10 ldr r10,[r12,#-8] # ifdef __thumb2__ it hi # endif strhi r11,[sp,#4*(16+11)] @ copy "rx" while at it add r3,r3,r11 ldr r11,[r12,#-4] # ifdef __ARMEB__ rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 # endif eor r0,r0,r8 add r8,sp,#4*(12) eor r1,r1,r9 str r0,[r14],#16 @ store output eor r2,r2,r10 str r1,[r14,#-12] eor r3,r3,r11 ldmia r8,{r8,r9,r10,r11} @ load key material str r2,[r14,#-8] str r3,[r14,#-4] add r4,r4,r8 @ accumulate key material add r8,r8,#4 @ next counter value add r5,r5,r9 str r8,[sp,#4*(12)] @ save next counter value ldr r8,[r12],#16 @ load input add r6,r6,r10 add r4,r4,#3 @ counter+3 ldr r9,[r12,#-12] add r7,r7,r11 ldr r10,[r12,#-8] ldr r11,[r12,#-4] # ifdef __ARMEB__ rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif eor r4,r4,r8 # ifdef __thumb2__ it hi # endif ldrhi r8,[sp,#4*(32+2)] @ re-load len eor r5,r5,r9 eor r6,r6,r10 str r4,[r14],#16 @ store output eor r7,r7,r11 str r5,[r14,#-12] sub r11,r8,#64*4 @ len-=64*4 str r6,[r14,#-8] str r7,[r14,#-4] bhi .Loop_neon_outer b .Ldone_neon .align 4 .Lbreak_neon: @ harmonize NEON and integer-only stack frames: load data @ from NEON frame, but save to integer-only one; distance @ between the two is 4*(32+4+16-32)=4*(20). str r11, [sp,#4*(20+32+2)] @ save len add r11,sp,#4*(32+4) str r12, [sp,#4*(20+32+1)] @ save inp str r14, [sp,#4*(20+32+0)] @ save out ldr r12,[sp,#4*(16+10)] ldr r14,[sp,#4*(16+11)] vldmia r11,{d8,d9,d10,d11,d12,d13,d14,d15} @ fulfill ABI requirement str r12,[sp,#4*(20+16+10)] @ copy "rx" str r14,[sp,#4*(20+16+11)] @ copy "rx" ldr r11, [sp,#4*(15)] ldr r12,[sp,#4*(12)] @ modulo-scheduled load ldr r10, [sp,#4*(13)] ldr r14,[sp,#4*(14)] str r11, [sp,#4*(20+16+15)] add r11,sp,#4*(20) vst1.32 {q0,q1},[r11]! @ copy key add sp,sp,#4*(20) @ switch frame vst1.32 {q2,q3},[r11] mov r11,#10 b .Loop @ go integer-only .align 4 .Ltail_neon: cmp r11,#64*3 bhs .L192_or_more_neon cmp r11,#64*2 bhs .L128_or_more_neon cmp r11,#64*1 bhs .L64_or_more_neon add r8,sp,#4*(8) vst1.8 {q0,q1},[sp] add r10,sp,#4*(0) vst1.8 {q2,q3},[r8] b .Loop_tail_neon .align 4 .L64_or_more_neon: vld1.8 {q12,q13},[r12]! vld1.8 {q14,q15},[r12]! veor q0,q0,q12 veor q1,q1,q13 veor q2,q2,q14 veor q3,q3,q15 vst1.8 {q0,q1},[r14]! vst1.8 {q2,q3},[r14]! beq .Ldone_neon add r8,sp,#4*(8) vst1.8 {q4,q5},[sp] add r10,sp,#4*(0) vst1.8 {q6,q7},[r8] sub r11,r11,#64*1 @ len-=64*1 b .Loop_tail_neon .align 4 .L128_or_more_neon: vld1.8 {q12,q13},[r12]! vld1.8 {q14,q15},[r12]! veor q0,q0,q12 veor q1,q1,q13 vld1.8 {q12,q13},[r12]! 
veor q2,q2,q14 veor q3,q3,q15 vld1.8 {q14,q15},[r12]! veor q4,q4,q12 veor q5,q5,q13 vst1.8 {q0,q1},[r14]! veor q6,q6,q14 vst1.8 {q2,q3},[r14]! veor q7,q7,q15 vst1.8 {q4,q5},[r14]! vst1.8 {q6,q7},[r14]! beq .Ldone_neon add r8,sp,#4*(8) vst1.8 {q8,q9},[sp] add r10,sp,#4*(0) vst1.8 {q10,q11},[r8] sub r11,r11,#64*2 @ len-=64*2 b .Loop_tail_neon .align 4 .L192_or_more_neon: vld1.8 {q12,q13},[r12]! vld1.8 {q14,q15},[r12]! veor q0,q0,q12 veor q1,q1,q13 vld1.8 {q12,q13},[r12]! veor q2,q2,q14 veor q3,q3,q15 vld1.8 {q14,q15},[r12]! veor q4,q4,q12 veor q5,q5,q13 vld1.8 {q12,q13},[r12]! veor q6,q6,q14 vst1.8 {q0,q1},[r14]! veor q7,q7,q15 vld1.8 {q14,q15},[r12]! veor q8,q8,q12 vst1.8 {q2,q3},[r14]! veor q9,q9,q13 vst1.8 {q4,q5},[r14]! veor q10,q10,q14 vst1.8 {q6,q7},[r14]! veor q11,q11,q15 vst1.8 {q8,q9},[r14]! vst1.8 {q10,q11},[r14]! beq .Ldone_neon ldmia sp,{r8,r9,r10,r11} @ load key material add r0,r0,r8 @ accumulate key material add r8,sp,#4*(4) add r1,r1,r9 add r2,r2,r10 add r3,r3,r11 ldmia r8,{r8,r9,r10,r11} @ load key material add r4,r4,r8 @ accumulate key material add r8,sp,#4*(8) add r5,r5,r9 add r6,r6,r10 add r7,r7,r11 ldmia r8,{r8,r9,r10,r11} @ load key material # ifdef __ARMEB__ rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif stmia sp,{r0,r1,r2,r3,r4,r5,r6,r7} add r0,sp,#4*(16+8) ldmia r0,{r0,r1,r2,r3,r4,r5,r6,r7} @ load second half add r0,r0,r8 @ accumulate key material add r8,sp,#4*(12) add r1,r1,r9 add r2,r2,r10 add r3,r3,r11 ldmia r8,{r8,r9,r10,r11} @ load key material add r4,r4,r8 @ accumulate key material add r8,sp,#4*(8) add r5,r5,r9 add r4,r4,#3 @ counter+3 add r6,r6,r10 add r7,r7,r11 ldr r11,[sp,#4*(32+2)] @ re-load len # ifdef __ARMEB__ rev r0,r0 rev r1,r1 rev r2,r2 rev r3,r3 rev r4,r4 rev r5,r5 rev r6,r6 rev r7,r7 # endif stmia r8,{r0,r1,r2,r3,r4,r5,r6,r7} add r10,sp,#4*(0) sub r11,r11,#64*3 @ len-=64*3 .Loop_tail_neon: ldrb r8,[r10],#1 @ read buffer on stack ldrb r9,[r12],#1 @ read input subs r11,r11,#1 eor r8,r8,r9 strb r8,[r14],#1 @ store output bne .Loop_tail_neon .Ldone_neon: add sp,sp,#4*(32+4) vldmia sp,{d8,d9,d10,d11,d12,d13,d14,d15} add sp,sp,#4*(16+3) ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,pc} .size ChaCha20_ctr32_neon,.-ChaCha20_ctr32_neon #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_ARM) && defined(__ELF__)
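ChaCha20_ctr32_nohw and ChaCha20_ctr32_neon above interleave the same core transformation: the .Loop / .Loop_neon bodies are the standard ChaCha20 double round, with the left-rotations by 16/12/8/7 expressed through rotated-register operands (ror#16/ror#20/ror#24/ror#25) in the integer path and through vshr/vsli pairs in the NEON path, and .Lsigma holding the usual "expand 32-byte k" constant. For orientation only, a plain C sketch of that double round follows; chacha_qr and chacha_double_round are hypothetical helper names, not aws-lc symbols:

    #include <stdint.h>

    static inline uint32_t rotl32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

    /* One ChaCha20 quarter round over four words of the 4x4 state. */
    static void chacha_qr(uint32_t x[16], int a, int b, int c, int d) {
        x[a] += x[b]; x[d] = rotl32(x[d] ^ x[a], 16);
        x[c] += x[d]; x[b] = rotl32(x[b] ^ x[c], 12);
        x[a] += x[b]; x[d] = rotl32(x[d] ^ x[a], 8);
        x[c] += x[d]; x[b] = rotl32(x[b] ^ x[c], 7);
    }

    /* One double round: column rounds, then diagonal rounds. The assembly runs
     * ten of these per block (mov r11,#10 ... subs r11,r11,#1 ... bne .Loop). */
    static void chacha_double_round(uint32_t x[16]) {
        chacha_qr(x, 0, 4,  8, 12);
        chacha_qr(x, 1, 5,  9, 13);
        chacha_qr(x, 2, 6, 10, 14);
        chacha_qr(x, 3, 7, 11, 15);
        chacha_qr(x, 0, 5, 10, 15);
        chacha_qr(x, 1, 6, 11, 12);
        chacha_qr(x, 2, 7,  8, 13);
        chacha_qr(x, 3, 4,  9, 14);
    }

After the twenty rounds the assembly adds the original state words back in (the "accumulate key material" steps), XORs the resulting 64-byte keystream with the input, and advances the block counter (the .Lone constant, or counter+1/+2/+3 in the NEON path, which keeps several blocks in flight at once).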
marvin-hansen/iggy-streaming-system
19,011
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/test/trampoline-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text // abi_test_trampoline loads callee-saved registers from |state|, calls |func| // with |argv|, then saves the callee-saved registers into |state|. It returns // the result of |func|. The |unwind| argument is unused. // uint64_t abi_test_trampoline(void (*func)(...), CallerState *state, // const uint64_t *argv, size_t argc, // uint64_t unwind); .type abi_test_trampoline, %function .globl abi_test_trampoline .hidden abi_test_trampoline .align 4 abi_test_trampoline: .Labi_test_trampoline_begin: AARCH64_SIGN_LINK_REGISTER // Stack layout (low to high addresses) // x29,x30 (16 bytes) // d8-d15 (64 bytes) // x19-x28 (80 bytes) // x1 (8 bytes) // padding (8 bytes) stp x29, x30, [sp, #-176]! mov x29, sp // Saved callee-saved registers and |state|. stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] stp x19, x20, [sp, #80] stp x21, x22, [sp, #96] stp x23, x24, [sp, #112] stp x25, x26, [sp, #128] stp x27, x28, [sp, #144] str x1, [sp, #160] // Load registers from |state|, with the exception of x29. x29 is the // frame pointer and also callee-saved, but AAPCS64 allows platforms to // mandate that x29 always point to a frame. iOS64 does so, which means // we cannot fill x29 with entropy without violating ABI rules // ourselves. x29 is tested separately below. ldp d8, d9, [x1], #16 ldp d10, d11, [x1], #16 ldp d12, d13, [x1], #16 ldp d14, d15, [x1], #16 ldp x19, x20, [x1], #16 ldp x21, x22, [x1], #16 ldp x23, x24, [x1], #16 ldp x25, x26, [x1], #16 ldp x27, x28, [x1], #16 // Move parameters into temporary registers. mov x9, x0 mov x10, x2 mov x11, x3 // Load parameters into registers. cbz x11, .Largs_done ldr x0, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x1, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x2, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x3, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x4, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x5, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x6, [x10], #8 subs x11, x11, #1 b.eq .Largs_done ldr x7, [x10], #8 .Largs_done: blr x9 // Reload |state| and store registers. ldr x1, [sp, #160] stp d8, d9, [x1], #16 stp d10, d11, [x1], #16 stp d12, d13, [x1], #16 stp d14, d15, [x1], #16 stp x19, x20, [x1], #16 stp x21, x22, [x1], #16 stp x23, x24, [x1], #16 stp x25, x26, [x1], #16 stp x27, x28, [x1], #16 // |func| is required to preserve x29, the frame pointer. We cannot load // random values into x29 (see comment above), so compare it against the // expected value and zero the field of |state| if corrupted. mov x9, sp cmp x29, x9 b.eq .Lx29_ok str xzr, [x1] .Lx29_ok: // Restore callee-saved registers. 
ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] ldp x19, x20, [sp, #80] ldp x21, x22, [sp, #96] ldp x23, x24, [sp, #112] ldp x25, x26, [sp, #128] ldp x27, x28, [sp, #144] ldp x29, x30, [sp], #176 AARCH64_VALIDATE_LINK_REGISTER ret .size abi_test_trampoline,.-abi_test_trampoline .type abi_test_clobber_x0, %function .globl abi_test_clobber_x0 .hidden abi_test_clobber_x0 .align 4 abi_test_clobber_x0: AARCH64_VALID_CALL_TARGET mov x0, xzr ret .size abi_test_clobber_x0,.-abi_test_clobber_x0 .type abi_test_clobber_x1, %function .globl abi_test_clobber_x1 .hidden abi_test_clobber_x1 .align 4 abi_test_clobber_x1: AARCH64_VALID_CALL_TARGET mov x1, xzr ret .size abi_test_clobber_x1,.-abi_test_clobber_x1 .type abi_test_clobber_x2, %function .globl abi_test_clobber_x2 .hidden abi_test_clobber_x2 .align 4 abi_test_clobber_x2: AARCH64_VALID_CALL_TARGET mov x2, xzr ret .size abi_test_clobber_x2,.-abi_test_clobber_x2 .type abi_test_clobber_x3, %function .globl abi_test_clobber_x3 .hidden abi_test_clobber_x3 .align 4 abi_test_clobber_x3: AARCH64_VALID_CALL_TARGET mov x3, xzr ret .size abi_test_clobber_x3,.-abi_test_clobber_x3 .type abi_test_clobber_x4, %function .globl abi_test_clobber_x4 .hidden abi_test_clobber_x4 .align 4 abi_test_clobber_x4: AARCH64_VALID_CALL_TARGET mov x4, xzr ret .size abi_test_clobber_x4,.-abi_test_clobber_x4 .type abi_test_clobber_x5, %function .globl abi_test_clobber_x5 .hidden abi_test_clobber_x5 .align 4 abi_test_clobber_x5: AARCH64_VALID_CALL_TARGET mov x5, xzr ret .size abi_test_clobber_x5,.-abi_test_clobber_x5 .type abi_test_clobber_x6, %function .globl abi_test_clobber_x6 .hidden abi_test_clobber_x6 .align 4 abi_test_clobber_x6: AARCH64_VALID_CALL_TARGET mov x6, xzr ret .size abi_test_clobber_x6,.-abi_test_clobber_x6 .type abi_test_clobber_x7, %function .globl abi_test_clobber_x7 .hidden abi_test_clobber_x7 .align 4 abi_test_clobber_x7: AARCH64_VALID_CALL_TARGET mov x7, xzr ret .size abi_test_clobber_x7,.-abi_test_clobber_x7 .type abi_test_clobber_x8, %function .globl abi_test_clobber_x8 .hidden abi_test_clobber_x8 .align 4 abi_test_clobber_x8: AARCH64_VALID_CALL_TARGET mov x8, xzr ret .size abi_test_clobber_x8,.-abi_test_clobber_x8 .type abi_test_clobber_x9, %function .globl abi_test_clobber_x9 .hidden abi_test_clobber_x9 .align 4 abi_test_clobber_x9: AARCH64_VALID_CALL_TARGET mov x9, xzr ret .size abi_test_clobber_x9,.-abi_test_clobber_x9 .type abi_test_clobber_x10, %function .globl abi_test_clobber_x10 .hidden abi_test_clobber_x10 .align 4 abi_test_clobber_x10: AARCH64_VALID_CALL_TARGET mov x10, xzr ret .size abi_test_clobber_x10,.-abi_test_clobber_x10 .type abi_test_clobber_x11, %function .globl abi_test_clobber_x11 .hidden abi_test_clobber_x11 .align 4 abi_test_clobber_x11: AARCH64_VALID_CALL_TARGET mov x11, xzr ret .size abi_test_clobber_x11,.-abi_test_clobber_x11 .type abi_test_clobber_x12, %function .globl abi_test_clobber_x12 .hidden abi_test_clobber_x12 .align 4 abi_test_clobber_x12: AARCH64_VALID_CALL_TARGET mov x12, xzr ret .size abi_test_clobber_x12,.-abi_test_clobber_x12 .type abi_test_clobber_x13, %function .globl abi_test_clobber_x13 .hidden abi_test_clobber_x13 .align 4 abi_test_clobber_x13: AARCH64_VALID_CALL_TARGET mov x13, xzr ret .size abi_test_clobber_x13,.-abi_test_clobber_x13 .type abi_test_clobber_x14, %function .globl abi_test_clobber_x14 .hidden abi_test_clobber_x14 .align 4 abi_test_clobber_x14: AARCH64_VALID_CALL_TARGET mov x14, xzr ret .size abi_test_clobber_x14,.-abi_test_clobber_x14 .type 
abi_test_clobber_x15, %function .globl abi_test_clobber_x15 .hidden abi_test_clobber_x15 .align 4 abi_test_clobber_x15: AARCH64_VALID_CALL_TARGET mov x15, xzr ret .size abi_test_clobber_x15,.-abi_test_clobber_x15 .type abi_test_clobber_x16, %function .globl abi_test_clobber_x16 .hidden abi_test_clobber_x16 .align 4 abi_test_clobber_x16: AARCH64_VALID_CALL_TARGET mov x16, xzr ret .size abi_test_clobber_x16,.-abi_test_clobber_x16 .type abi_test_clobber_x17, %function .globl abi_test_clobber_x17 .hidden abi_test_clobber_x17 .align 4 abi_test_clobber_x17: AARCH64_VALID_CALL_TARGET mov x17, xzr ret .size abi_test_clobber_x17,.-abi_test_clobber_x17 .type abi_test_clobber_x19, %function .globl abi_test_clobber_x19 .hidden abi_test_clobber_x19 .align 4 abi_test_clobber_x19: AARCH64_VALID_CALL_TARGET mov x19, xzr ret .size abi_test_clobber_x19,.-abi_test_clobber_x19 .type abi_test_clobber_x20, %function .globl abi_test_clobber_x20 .hidden abi_test_clobber_x20 .align 4 abi_test_clobber_x20: AARCH64_VALID_CALL_TARGET mov x20, xzr ret .size abi_test_clobber_x20,.-abi_test_clobber_x20 .type abi_test_clobber_x21, %function .globl abi_test_clobber_x21 .hidden abi_test_clobber_x21 .align 4 abi_test_clobber_x21: AARCH64_VALID_CALL_TARGET mov x21, xzr ret .size abi_test_clobber_x21,.-abi_test_clobber_x21 .type abi_test_clobber_x22, %function .globl abi_test_clobber_x22 .hidden abi_test_clobber_x22 .align 4 abi_test_clobber_x22: AARCH64_VALID_CALL_TARGET mov x22, xzr ret .size abi_test_clobber_x22,.-abi_test_clobber_x22 .type abi_test_clobber_x23, %function .globl abi_test_clobber_x23 .hidden abi_test_clobber_x23 .align 4 abi_test_clobber_x23: AARCH64_VALID_CALL_TARGET mov x23, xzr ret .size abi_test_clobber_x23,.-abi_test_clobber_x23 .type abi_test_clobber_x24, %function .globl abi_test_clobber_x24 .hidden abi_test_clobber_x24 .align 4 abi_test_clobber_x24: AARCH64_VALID_CALL_TARGET mov x24, xzr ret .size abi_test_clobber_x24,.-abi_test_clobber_x24 .type abi_test_clobber_x25, %function .globl abi_test_clobber_x25 .hidden abi_test_clobber_x25 .align 4 abi_test_clobber_x25: AARCH64_VALID_CALL_TARGET mov x25, xzr ret .size abi_test_clobber_x25,.-abi_test_clobber_x25 .type abi_test_clobber_x26, %function .globl abi_test_clobber_x26 .hidden abi_test_clobber_x26 .align 4 abi_test_clobber_x26: AARCH64_VALID_CALL_TARGET mov x26, xzr ret .size abi_test_clobber_x26,.-abi_test_clobber_x26 .type abi_test_clobber_x27, %function .globl abi_test_clobber_x27 .hidden abi_test_clobber_x27 .align 4 abi_test_clobber_x27: AARCH64_VALID_CALL_TARGET mov x27, xzr ret .size abi_test_clobber_x27,.-abi_test_clobber_x27 .type abi_test_clobber_x28, %function .globl abi_test_clobber_x28 .hidden abi_test_clobber_x28 .align 4 abi_test_clobber_x28: AARCH64_VALID_CALL_TARGET mov x28, xzr ret .size abi_test_clobber_x28,.-abi_test_clobber_x28 .type abi_test_clobber_x29, %function .globl abi_test_clobber_x29 .hidden abi_test_clobber_x29 .align 4 abi_test_clobber_x29: AARCH64_VALID_CALL_TARGET mov x29, xzr ret .size abi_test_clobber_x29,.-abi_test_clobber_x29 .type abi_test_clobber_d0, %function .globl abi_test_clobber_d0 .hidden abi_test_clobber_d0 .align 4 abi_test_clobber_d0: AARCH64_VALID_CALL_TARGET fmov d0, xzr ret .size abi_test_clobber_d0,.-abi_test_clobber_d0 .type abi_test_clobber_d1, %function .globl abi_test_clobber_d1 .hidden abi_test_clobber_d1 .align 4 abi_test_clobber_d1: AARCH64_VALID_CALL_TARGET fmov d1, xzr ret .size abi_test_clobber_d1,.-abi_test_clobber_d1 .type abi_test_clobber_d2, %function .globl abi_test_clobber_d2 
.hidden abi_test_clobber_d2 .align 4 abi_test_clobber_d2: AARCH64_VALID_CALL_TARGET fmov d2, xzr ret .size abi_test_clobber_d2,.-abi_test_clobber_d2 .type abi_test_clobber_d3, %function .globl abi_test_clobber_d3 .hidden abi_test_clobber_d3 .align 4 abi_test_clobber_d3: AARCH64_VALID_CALL_TARGET fmov d3, xzr ret .size abi_test_clobber_d3,.-abi_test_clobber_d3 .type abi_test_clobber_d4, %function .globl abi_test_clobber_d4 .hidden abi_test_clobber_d4 .align 4 abi_test_clobber_d4: AARCH64_VALID_CALL_TARGET fmov d4, xzr ret .size abi_test_clobber_d4,.-abi_test_clobber_d4 .type abi_test_clobber_d5, %function .globl abi_test_clobber_d5 .hidden abi_test_clobber_d5 .align 4 abi_test_clobber_d5: AARCH64_VALID_CALL_TARGET fmov d5, xzr ret .size abi_test_clobber_d5,.-abi_test_clobber_d5 .type abi_test_clobber_d6, %function .globl abi_test_clobber_d6 .hidden abi_test_clobber_d6 .align 4 abi_test_clobber_d6: AARCH64_VALID_CALL_TARGET fmov d6, xzr ret .size abi_test_clobber_d6,.-abi_test_clobber_d6 .type abi_test_clobber_d7, %function .globl abi_test_clobber_d7 .hidden abi_test_clobber_d7 .align 4 abi_test_clobber_d7: AARCH64_VALID_CALL_TARGET fmov d7, xzr ret .size abi_test_clobber_d7,.-abi_test_clobber_d7 .type abi_test_clobber_d8, %function .globl abi_test_clobber_d8 .hidden abi_test_clobber_d8 .align 4 abi_test_clobber_d8: AARCH64_VALID_CALL_TARGET fmov d8, xzr ret .size abi_test_clobber_d8,.-abi_test_clobber_d8 .type abi_test_clobber_d9, %function .globl abi_test_clobber_d9 .hidden abi_test_clobber_d9 .align 4 abi_test_clobber_d9: AARCH64_VALID_CALL_TARGET fmov d9, xzr ret .size abi_test_clobber_d9,.-abi_test_clobber_d9 .type abi_test_clobber_d10, %function .globl abi_test_clobber_d10 .hidden abi_test_clobber_d10 .align 4 abi_test_clobber_d10: AARCH64_VALID_CALL_TARGET fmov d10, xzr ret .size abi_test_clobber_d10,.-abi_test_clobber_d10 .type abi_test_clobber_d11, %function .globl abi_test_clobber_d11 .hidden abi_test_clobber_d11 .align 4 abi_test_clobber_d11: AARCH64_VALID_CALL_TARGET fmov d11, xzr ret .size abi_test_clobber_d11,.-abi_test_clobber_d11 .type abi_test_clobber_d12, %function .globl abi_test_clobber_d12 .hidden abi_test_clobber_d12 .align 4 abi_test_clobber_d12: AARCH64_VALID_CALL_TARGET fmov d12, xzr ret .size abi_test_clobber_d12,.-abi_test_clobber_d12 .type abi_test_clobber_d13, %function .globl abi_test_clobber_d13 .hidden abi_test_clobber_d13 .align 4 abi_test_clobber_d13: AARCH64_VALID_CALL_TARGET fmov d13, xzr ret .size abi_test_clobber_d13,.-abi_test_clobber_d13 .type abi_test_clobber_d14, %function .globl abi_test_clobber_d14 .hidden abi_test_clobber_d14 .align 4 abi_test_clobber_d14: AARCH64_VALID_CALL_TARGET fmov d14, xzr ret .size abi_test_clobber_d14,.-abi_test_clobber_d14 .type abi_test_clobber_d15, %function .globl abi_test_clobber_d15 .hidden abi_test_clobber_d15 .align 4 abi_test_clobber_d15: AARCH64_VALID_CALL_TARGET fmov d15, xzr ret .size abi_test_clobber_d15,.-abi_test_clobber_d15 .type abi_test_clobber_d16, %function .globl abi_test_clobber_d16 .hidden abi_test_clobber_d16 .align 4 abi_test_clobber_d16: AARCH64_VALID_CALL_TARGET fmov d16, xzr ret .size abi_test_clobber_d16,.-abi_test_clobber_d16 .type abi_test_clobber_d17, %function .globl abi_test_clobber_d17 .hidden abi_test_clobber_d17 .align 4 abi_test_clobber_d17: AARCH64_VALID_CALL_TARGET fmov d17, xzr ret .size abi_test_clobber_d17,.-abi_test_clobber_d17 .type abi_test_clobber_d18, %function .globl abi_test_clobber_d18 .hidden abi_test_clobber_d18 .align 4 abi_test_clobber_d18: AARCH64_VALID_CALL_TARGET 
fmov d18, xzr ret .size abi_test_clobber_d18,.-abi_test_clobber_d18 .type abi_test_clobber_d19, %function .globl abi_test_clobber_d19 .hidden abi_test_clobber_d19 .align 4 abi_test_clobber_d19: AARCH64_VALID_CALL_TARGET fmov d19, xzr ret .size abi_test_clobber_d19,.-abi_test_clobber_d19 .type abi_test_clobber_d20, %function .globl abi_test_clobber_d20 .hidden abi_test_clobber_d20 .align 4 abi_test_clobber_d20: AARCH64_VALID_CALL_TARGET fmov d20, xzr ret .size abi_test_clobber_d20,.-abi_test_clobber_d20 .type abi_test_clobber_d21, %function .globl abi_test_clobber_d21 .hidden abi_test_clobber_d21 .align 4 abi_test_clobber_d21: AARCH64_VALID_CALL_TARGET fmov d21, xzr ret .size abi_test_clobber_d21,.-abi_test_clobber_d21 .type abi_test_clobber_d22, %function .globl abi_test_clobber_d22 .hidden abi_test_clobber_d22 .align 4 abi_test_clobber_d22: AARCH64_VALID_CALL_TARGET fmov d22, xzr ret .size abi_test_clobber_d22,.-abi_test_clobber_d22 .type abi_test_clobber_d23, %function .globl abi_test_clobber_d23 .hidden abi_test_clobber_d23 .align 4 abi_test_clobber_d23: AARCH64_VALID_CALL_TARGET fmov d23, xzr ret .size abi_test_clobber_d23,.-abi_test_clobber_d23 .type abi_test_clobber_d24, %function .globl abi_test_clobber_d24 .hidden abi_test_clobber_d24 .align 4 abi_test_clobber_d24: AARCH64_VALID_CALL_TARGET fmov d24, xzr ret .size abi_test_clobber_d24,.-abi_test_clobber_d24 .type abi_test_clobber_d25, %function .globl abi_test_clobber_d25 .hidden abi_test_clobber_d25 .align 4 abi_test_clobber_d25: AARCH64_VALID_CALL_TARGET fmov d25, xzr ret .size abi_test_clobber_d25,.-abi_test_clobber_d25 .type abi_test_clobber_d26, %function .globl abi_test_clobber_d26 .hidden abi_test_clobber_d26 .align 4 abi_test_clobber_d26: AARCH64_VALID_CALL_TARGET fmov d26, xzr ret .size abi_test_clobber_d26,.-abi_test_clobber_d26 .type abi_test_clobber_d27, %function .globl abi_test_clobber_d27 .hidden abi_test_clobber_d27 .align 4 abi_test_clobber_d27: AARCH64_VALID_CALL_TARGET fmov d27, xzr ret .size abi_test_clobber_d27,.-abi_test_clobber_d27 .type abi_test_clobber_d28, %function .globl abi_test_clobber_d28 .hidden abi_test_clobber_d28 .align 4 abi_test_clobber_d28: AARCH64_VALID_CALL_TARGET fmov d28, xzr ret .size abi_test_clobber_d28,.-abi_test_clobber_d28 .type abi_test_clobber_d29, %function .globl abi_test_clobber_d29 .hidden abi_test_clobber_d29 .align 4 abi_test_clobber_d29: AARCH64_VALID_CALL_TARGET fmov d29, xzr ret .size abi_test_clobber_d29,.-abi_test_clobber_d29 .type abi_test_clobber_d30, %function .globl abi_test_clobber_d30 .hidden abi_test_clobber_d30 .align 4 abi_test_clobber_d30: AARCH64_VALID_CALL_TARGET fmov d30, xzr ret .size abi_test_clobber_d30,.-abi_test_clobber_d30 .type abi_test_clobber_d31, %function .globl abi_test_clobber_d31 .hidden abi_test_clobber_d31 .align 4 abi_test_clobber_d31: AARCH64_VALID_CALL_TARGET fmov d31, xzr ret .size abi_test_clobber_d31,.-abi_test_clobber_d31 .type abi_test_clobber_v8_upper, %function .globl abi_test_clobber_v8_upper .hidden abi_test_clobber_v8_upper .align 4 abi_test_clobber_v8_upper: AARCH64_VALID_CALL_TARGET fmov v8.d[1], xzr ret .size abi_test_clobber_v8_upper,.-abi_test_clobber_v8_upper .type abi_test_clobber_v9_upper, %function .globl abi_test_clobber_v9_upper .hidden abi_test_clobber_v9_upper .align 4 abi_test_clobber_v9_upper: AARCH64_VALID_CALL_TARGET fmov v9.d[1], xzr ret .size abi_test_clobber_v9_upper,.-abi_test_clobber_v9_upper .type abi_test_clobber_v10_upper, %function .globl abi_test_clobber_v10_upper .hidden abi_test_clobber_v10_upper 
.align 4 abi_test_clobber_v10_upper: AARCH64_VALID_CALL_TARGET fmov v10.d[1], xzr ret .size abi_test_clobber_v10_upper,.-abi_test_clobber_v10_upper .type abi_test_clobber_v11_upper, %function .globl abi_test_clobber_v11_upper .hidden abi_test_clobber_v11_upper .align 4 abi_test_clobber_v11_upper: AARCH64_VALID_CALL_TARGET fmov v11.d[1], xzr ret .size abi_test_clobber_v11_upper,.-abi_test_clobber_v11_upper .type abi_test_clobber_v12_upper, %function .globl abi_test_clobber_v12_upper .hidden abi_test_clobber_v12_upper .align 4 abi_test_clobber_v12_upper: AARCH64_VALID_CALL_TARGET fmov v12.d[1], xzr ret .size abi_test_clobber_v12_upper,.-abi_test_clobber_v12_upper .type abi_test_clobber_v13_upper, %function .globl abi_test_clobber_v13_upper .hidden abi_test_clobber_v13_upper .align 4 abi_test_clobber_v13_upper: AARCH64_VALID_CALL_TARGET fmov v13.d[1], xzr ret .size abi_test_clobber_v13_upper,.-abi_test_clobber_v13_upper .type abi_test_clobber_v14_upper, %function .globl abi_test_clobber_v14_upper .hidden abi_test_clobber_v14_upper .align 4 abi_test_clobber_v14_upper: AARCH64_VALID_CALL_TARGET fmov v14.d[1], xzr ret .size abi_test_clobber_v14_upper,.-abi_test_clobber_v14_upper .type abi_test_clobber_v15_upper, %function .globl abi_test_clobber_v15_upper .hidden abi_test_clobber_v15_upper .align 4 abi_test_clobber_v15_upper: AARCH64_VALID_CALL_TARGET fmov v15.d[1], xzr ret .size abi_test_clobber_v15_upper,.-abi_test_clobber_v15_upper #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
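The trampoline file above states its C-level contract in its leading comment (abi_test_trampoline takes |func|, |state|, |argv|, |argc|, |unwind|). Below is a hedged C sketch of a state layout consistent with the load/store order visible in the assembly, d8-d15 followed by x19-x28; the library defines its own CallerState type in its abi_test framework, so this struct, the simplified function-pointer signature, and call_clobber_check are illustrative assumptions only:

#include <stddef.h>
#include <stdint.h>

/* Assumption: mirrors only the order in which the trampoline reads |state|
 * (ldp d8,d9 ... ldp x27,x28), not the library's actual CallerState type. */
typedef struct {
    uint64_t d8_d15[8];   /* FP callee-saved registers, 64 bytes */
    uint64_t x19_x28[10]; /* integer callee-saved registers, 80 bytes */
} caller_state_sketch;

/* Prototype adapted from the comment in the assembly; the variadic |func|
 * is narrowed to a plain function pointer for this sketch. */
extern uint64_t abi_test_trampoline(void (*func)(void), void *state,
                                    const uint64_t *argv, size_t argc,
                                    uint64_t unwind);

/* Hypothetical harness step: seed the state, run |target| through the
 * trampoline, then inspect the struct for clobbered callee-saved registers. */
static uint64_t call_clobber_check(void (*target)(void))
{
    caller_state_sketch st = {{0}, {0}};
    return abi_test_trampoline(target, &st, NULL, 0, 0);
}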
marvin-hansen/iggy-streaming-system
47,788
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/aesv8-armx.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__>=7 .text .arch armv8-a+crypto .section .rodata .align 5 .Lrcon: .long 0x01,0x01,0x01,0x01 .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d // rotate-n-splat .long 0x1b,0x1b,0x1b,0x1b .text .globl aes_hw_set_encrypt_key .hidden aes_hw_set_encrypt_key .type aes_hw_set_encrypt_key,%function .align 5 aes_hw_set_encrypt_key: .Lenc_key: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#3] // kFlag_aes_hw_set_encrypt_key #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x3,#-1 cmp x0,#0 b.eq .Lenc_key_abort cmp x2,#0 b.eq .Lenc_key_abort mov x3,#-2 cmp w1,#128 b.lt .Lenc_key_abort cmp w1,#256 b.gt .Lenc_key_abort tst w1,#0x3f b.ne .Lenc_key_abort adrp x3,.Lrcon add x3,x3,:lo12:.Lrcon cmp w1,#192 eor v0.16b,v0.16b,v0.16b ld1 {v3.16b},[x0],#16 mov w1,#8 // reuse w1 ld1 {v1.4s,v2.4s},[x3],#32 b.lt .Loop128 b.eq .L192 b .L256 .align 4 .Loop128: tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b b.ne .Loop128 ld1 {v1.4s},[x3] tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b eor v3.16b,v3.16b,v6.16b st1 {v3.4s},[x2] add x2,x2,#0x50 mov w12,#10 b .Ldone .align 4 .L192: ld1 {v4.8b},[x0],#8 movi v6.16b,#8 // borrow v6.16b st1 {v3.4s},[x2],#16 sub v2.16b,v2.16b,v6.16b // adjust the mask .Loop192: tbl v6.16b,{v4.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v4.8b},[x2],#8 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b dup v5.4s,v3.s[3] eor v5.16b,v5.16b,v4.16b eor v6.16b,v6.16b,v1.16b ext v4.16b,v0.16b,v4.16b,#12 shl v1.16b,v1.16b,#1 eor v4.16b,v4.16b,v5.16b eor v3.16b,v3.16b,v6.16b eor v4.16b,v4.16b,v6.16b st1 {v3.4s},[x2],#16 b.ne .Loop192 mov w12,#12 add x2,x2,#0x20 b .Ldone .align 4 .L256: ld1 {v4.16b},[x0] mov w1,#7 mov w12,#14 st1 {v3.4s},[x2],#16 .Loop256: tbl v6.16b,{v4.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v4.4s},[x2],#16 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b st1 {v3.4s},[x2],#16 b.eq .Ldone dup v6.4s,v3.s[3] // just splat ext v5.16b,v0.16b,v4.16b,#12 aese v6.16b,v0.16b eor v4.16b,v4.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v4.16b,v4.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 
eor v4.16b,v4.16b,v5.16b eor v4.16b,v4.16b,v6.16b b .Loop256 .Ldone: str w12,[x2] mov x3,#0 .Lenc_key_abort: mov x0,x3 // return value ldr x29,[sp],#16 ret .size aes_hw_set_encrypt_key,.-aes_hw_set_encrypt_key .globl aes_hw_set_decrypt_key .hidden aes_hw_set_decrypt_key .type aes_hw_set_decrypt_key,%function .align 5 aes_hw_set_decrypt_key: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 bl .Lenc_key cmp x0,#0 b.ne .Ldec_key_abort sub x2,x2,#240 // restore original x2 mov x4,#-16 add x0,x2,x12,lsl#4 // end of key schedule ld1 {v0.4s},[x2] ld1 {v1.4s},[x0] st1 {v0.4s},[x0],x4 st1 {v1.4s},[x2],#16 .Loop_imc: ld1 {v0.4s},[x2] ld1 {v1.4s},[x0] aesimc v0.16b,v0.16b aesimc v1.16b,v1.16b st1 {v0.4s},[x0],x4 st1 {v1.4s},[x2],#16 cmp x0,x2 b.hi .Loop_imc ld1 {v0.4s},[x2] aesimc v0.16b,v0.16b st1 {v0.4s},[x0] eor x0,x0,x0 // return value .Ldec_key_abort: ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size aes_hw_set_decrypt_key,.-aes_hw_set_decrypt_key .globl aes_hw_encrypt .hidden aes_hw_encrypt .type aes_hw_encrypt,%function .align 5 aes_hw_encrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#1] // kFlag_aes_hw_encrypt #endif AARCH64_VALID_CALL_TARGET ldr w3,[x2,#240] ld1 {v0.4s},[x2],#16 ld1 {v2.16b},[x0] sub w3,w3,#2 ld1 {v1.4s},[x2],#16 .Loop_enc: aese v2.16b,v0.16b aesmc v2.16b,v2.16b ld1 {v0.4s},[x2],#16 subs w3,w3,#2 aese v2.16b,v1.16b aesmc v2.16b,v2.16b ld1 {v1.4s},[x2],#16 b.gt .Loop_enc aese v2.16b,v0.16b aesmc v2.16b,v2.16b ld1 {v0.4s},[x2] aese v2.16b,v1.16b eor v2.16b,v2.16b,v0.16b st1 {v2.16b},[x1] ret .size aes_hw_encrypt,.-aes_hw_encrypt .globl aes_hw_decrypt .hidden aes_hw_decrypt .type aes_hw_decrypt,%function .align 5 aes_hw_decrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#1] // kFlag_aes_hw_encrypt #endif AARCH64_VALID_CALL_TARGET ldr w3,[x2,#240] ld1 {v0.4s},[x2],#16 ld1 {v2.16b},[x0] sub w3,w3,#2 ld1 {v1.4s},[x2],#16 .Loop_dec: aesd v2.16b,v0.16b aesimc v2.16b,v2.16b ld1 {v0.4s},[x2],#16 subs w3,w3,#2 aesd v2.16b,v1.16b aesimc v2.16b,v2.16b ld1 {v1.4s},[x2],#16 b.gt .Loop_dec aesd v2.16b,v0.16b aesimc v2.16b,v2.16b ld1 {v0.4s},[x2] aesd v2.16b,v1.16b eor v2.16b,v2.16b,v0.16b st1 {v2.16b},[x1] ret .size aes_hw_decrypt,.-aes_hw_decrypt .globl aes_hw_cbc_encrypt .hidden aes_hw_cbc_encrypt .type aes_hw_cbc_encrypt,%function .align 5 aes_hw_cbc_encrypt: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 subs x2,x2,#16 mov x8,#16 b.lo .Lcbc_abort csel x8,xzr,x8,eq cmp w5,#0 // en- or decrypting? ldr w5,[x3,#240] and x2,x2,#-16 ld1 {v6.16b},[x4] ld1 {v0.16b},[x0],x8 ld1 {v16.4s,v17.4s},[x3] // load key schedule... 
sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 b.eq .Lcbc_dec cmp w5,#2 eor v0.16b,v0.16b,v6.16b eor v5.16b,v16.16b,v7.16b b.eq .Lcbc_enc128 ld1 {v2.4s,v3.4s},[x7] add x7,x3,#16 add x6,x3,#16*4 add x12,x3,#16*5 aese v0.16b,v16.16b aesmc v0.16b,v0.16b add x14,x3,#16*6 add x3,x3,#16*7 b .Lenter_cbc_enc .align 4 .Loop_cbc_enc: aese v0.16b,v16.16b aesmc v0.16b,v0.16b st1 {v6.16b},[x1],#16 .Lenter_cbc_enc: aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v0.16b,v2.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x6] cmp w5,#4 aese v0.16b,v3.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x12] b.eq .Lcbc_enc192 aese v0.16b,v16.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x14] aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x3] nop .Lcbc_enc192: aese v0.16b,v16.16b aesmc v0.16b,v0.16b subs x2,x2,#16 aese v0.16b,v17.16b aesmc v0.16b,v0.16b csel x8,xzr,x8,eq aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v16.16b},[x0],x8 aese v0.16b,v20.16b aesmc v0.16b,v0.16b eor v16.16b,v16.16b,v5.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x7] // re-pre-load rndkey[1] aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v0.16b,v23.16b eor v6.16b,v0.16b,v7.16b b.hs .Loop_cbc_enc st1 {v6.16b},[x1],#16 b .Lcbc_done .align 5 .Lcbc_enc128: ld1 {v2.4s,v3.4s},[x7] aese v0.16b,v16.16b aesmc v0.16b,v0.16b b .Lenter_cbc_enc128 .Loop_cbc_enc128: aese v0.16b,v16.16b aesmc v0.16b,v0.16b st1 {v6.16b},[x1],#16 .Lenter_cbc_enc128: aese v0.16b,v17.16b aesmc v0.16b,v0.16b subs x2,x2,#16 aese v0.16b,v2.16b aesmc v0.16b,v0.16b csel x8,xzr,x8,eq aese v0.16b,v3.16b aesmc v0.16b,v0.16b aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v16.16b},[x0],x8 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b eor v16.16b,v16.16b,v5.16b aese v0.16b,v23.16b eor v6.16b,v0.16b,v7.16b b.hs .Loop_cbc_enc128 st1 {v6.16b},[x1],#16 b .Lcbc_done .align 5 .Lcbc_dec: ld1 {v18.16b},[x0],#16 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v19.16b,v18.16b,v18.16b b.lo .Lcbc_dec_tail orr v1.16b,v18.16b,v18.16b ld1 {v18.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b orr v19.16b,v18.16b,v18.16b .Loop3x_cbc_dec: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt .Loop3x_cbc_dec aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 eor v5.16b,v2.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b eor v17.16b,v3.16b,v7.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v18.16b // are loaded with last "words" orr v6.16b,v19.16b,v19.16b mov x7,x3 aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v18.16b,v20.16b aesimc v18.16b,v18.16b ld1 {v2.16b},[x0],#16 aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v18.16b,v21.16b 
aesimc v18.16b,v18.16b ld1 {v3.16b},[x0],#16 aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v18.16b,v22.16b aesimc v18.16b,v18.16b ld1 {v19.16b},[x0],#16 aesd v0.16b,v23.16b aesd v1.16b,v23.16b aesd v18.16b,v23.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] add w6,w5,#2 eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v18.16b,v18.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 orr v0.16b,v2.16b,v2.16b st1 {v5.16b},[x1],#16 orr v1.16b,v3.16b,v3.16b st1 {v18.16b},[x1],#16 orr v18.16b,v19.16b,v19.16b b.hs .Loop3x_cbc_dec cmn x2,#0x30 b.eq .Lcbc_done nop .Lcbc_dec_tail: aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt .Lcbc_dec_tail aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v18.16b,v20.16b aesimc v18.16b,v18.16b cmn x2,#0x20 aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v18.16b,v21.16b aesimc v18.16b,v18.16b eor v5.16b,v6.16b,v7.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v18.16b,v22.16b aesimc v18.16b,v18.16b eor v17.16b,v3.16b,v7.16b aesd v1.16b,v23.16b aesd v18.16b,v23.16b b.eq .Lcbc_dec_one eor v5.16b,v5.16b,v1.16b eor v17.16b,v17.16b,v18.16b orr v6.16b,v19.16b,v19.16b st1 {v5.16b},[x1],#16 st1 {v17.16b},[x1],#16 b .Lcbc_done .Lcbc_dec_one: eor v5.16b,v5.16b,v18.16b orr v6.16b,v19.16b,v19.16b st1 {v5.16b},[x1],#16 .Lcbc_done: st1 {v6.16b},[x4] .Lcbc_abort: ldr x29,[sp],#16 ret .size aes_hw_cbc_encrypt,.-aes_hw_cbc_encrypt .globl aes_hw_ctr32_encrypt_blocks .hidden aes_hw_ctr32_encrypt_blocks .type aes_hw_ctr32_encrypt_blocks,%function .align 5 aes_hw_ctr32_encrypt_blocks: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9] // kFlag_aes_hw_ctr32_encrypt_blocks #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 ldr w5,[x3,#240] ldr w8, [x4, #12] ld1 {v0.4s},[x4] ld1 {v16.4s,v17.4s},[x3] // load key schedule... sub w5,w5,#4 mov x12,#16 cmp x2,#2 add x7,x3,x5,lsl#4 // pointer to last 5 round keys sub w5,w5,#2 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 // ARM Cortex-A57 and Cortex-A72 cores running in 32-bit mode are // affected by silicon errata #1742098 [0] and #1655431 [1], // respectively, where the second instruction of an aese/aesmc // instruction pair may execute twice if an interrupt is taken right // after the first instruction consumes an input register of which a // single 32-bit lane has been updated the last time it was modified. // // This function uses a counter in one 32-bit lane. The vmov lines // could write to v1.16b and v18.16b directly, but that trips this bugs. // We write to v6.16b and copy to the final register as a workaround. 
// // [0] ARM-EPM-049219 v23 Cortex-A57 MPCore Software Developers Errata Notice // [1] ARM-EPM-012079 v11.0 Cortex-A72 MPCore Software Developers Errata Notice #ifndef __AARCH64EB__ rev w8, w8 #endif add w10, w8, #1 orr v6.16b,v0.16b,v0.16b rev w10, w10 mov v6.s[3],w10 add w8, w8, #2 orr v1.16b,v6.16b,v6.16b b.ls .Lctr32_tail rev w12, w8 mov v6.s[3],w12 sub x2,x2,#3 // bias orr v18.16b,v6.16b,v6.16b b .Loop3x_ctr32 .align 4 .Loop3x_ctr32: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v18.16b,v16.16b aesmc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v18.16b,v17.16b aesmc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt .Loop3x_ctr32 aese v0.16b,v16.16b aesmc v4.16b,v0.16b aese v1.16b,v16.16b aesmc v5.16b,v1.16b ld1 {v2.16b},[x0],#16 add w9,w8,#1 aese v18.16b,v16.16b aesmc v18.16b,v18.16b ld1 {v3.16b},[x0],#16 rev w9,w9 aese v4.16b,v17.16b aesmc v4.16b,v4.16b aese v5.16b,v17.16b aesmc v5.16b,v5.16b ld1 {v19.16b},[x0],#16 mov x7,x3 aese v18.16b,v17.16b aesmc v17.16b,v18.16b aese v4.16b,v20.16b aesmc v4.16b,v4.16b aese v5.16b,v20.16b aesmc v5.16b,v5.16b eor v2.16b,v2.16b,v7.16b add w10,w8,#2 aese v17.16b,v20.16b aesmc v17.16b,v17.16b eor v3.16b,v3.16b,v7.16b add w8,w8,#3 aese v4.16b,v21.16b aesmc v4.16b,v4.16b aese v5.16b,v21.16b aesmc v5.16b,v5.16b // Note the logic to update v0.16b, v1.16b, and v1.16b is written to work // around a bug in ARM Cortex-A57 and Cortex-A72 cores running in // 32-bit mode. See the comment above. eor v19.16b,v19.16b,v7.16b mov v6.s[3], w9 aese v17.16b,v21.16b aesmc v17.16b,v17.16b orr v0.16b,v6.16b,v6.16b rev w10,w10 aese v4.16b,v22.16b aesmc v4.16b,v4.16b mov v6.s[3], w10 rev w12,w8 aese v5.16b,v22.16b aesmc v5.16b,v5.16b orr v1.16b,v6.16b,v6.16b mov v6.s[3], w12 aese v17.16b,v22.16b aesmc v17.16b,v17.16b orr v18.16b,v6.16b,v6.16b subs x2,x2,#3 aese v4.16b,v23.16b aese v5.16b,v23.16b aese v17.16b,v23.16b eor v2.16b,v2.16b,v4.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] st1 {v2.16b},[x1],#16 eor v3.16b,v3.16b,v5.16b mov w6,w5 st1 {v3.16b},[x1],#16 eor v19.16b,v19.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v19.16b},[x1],#16 b.hs .Loop3x_ctr32 adds x2,x2,#3 b.eq .Lctr32_done .Lctr32_tail: cmp x2,#1 b.lt .Lctr32_done // if len = 0, go to done mov x12,#16 csel x12,xzr,x12,eq aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b ld1 {v17.4s},[x7],#16 b.gt .Lctr32_tail aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b ld1 {v2.16b},[x0],x12 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b ld1 {v3.16b},[x0] aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b eor v2.16b,v2.16b,v7.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b eor v3.16b,v3.16b,v7.16b aese v0.16b,v23.16b aese v1.16b,v23.16b eor v2.16b,v2.16b,v0.16b eor v3.16b,v3.16b,v1.16b st1 {v2.16b},[x1],#16 cbz x12,.Lctr32_done // if step = 0 (len = 1), go to done st1 {v3.16b},[x1] .Lctr32_done: ldr x29,[sp],#16 ret .size aes_hw_ctr32_encrypt_blocks,.-aes_hw_ctr32_encrypt_blocks .globl aes_hw_xts_encrypt .hidden aes_hw_xts_encrypt .type aes_hw_xts_encrypt,%function .align 5 aes_hw_xts_encrypt: AARCH64_VALID_CALL_TARGET cmp 
x2,#16 // Original input data size bigger than 16, jump to big size processing. b.ne .Lxts_enc_big_size // Encrypt the iv with key2, as the first XEX iv. ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 .Loop_enc_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt .Loop_enc_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b ld1 {v0.16b},[x0] eor v0.16b,v6.16b,v0.16b ldr w6,[x3,#240] ld1 {v28.4s,v29.4s},[x3],#32 // load key schedule... aese v0.16b,v28.16b aesmc v0.16b,v0.16b ld1 {v16.4s,v17.4s},[x3],#32 // load key schedule... aese v0.16b,v29.16b aesmc v0.16b,v0.16b subs w6,w6,#10 // if rounds==10, jump to aes-128-xts processing b.eq .Lxts_128_enc .Lxts_enc_round_loop: aese v0.16b,v16.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x3],#16 // load key schedule... aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x3],#16 // load key schedule... subs w6,w6,#2 // bias b.gt .Lxts_enc_round_loop .Lxts_128_enc: ld1 {v18.4s,v19.4s},[x3],#32 // load key schedule... aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v20.4s,v21.4s},[x3],#32 // load key schedule... aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v22.4s,v23.4s},[x3],#32 // load key schedule... aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b ld1 {v7.4s},[x3] aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v0.16b,v23.16b eor v0.16b,v0.16b,v7.16b eor v0.16b,v0.16b,v6.16b st1 {v0.16b},[x1] b .Lxts_enc_final_abort .align 4 .Lxts_enc_big_size: // Encrypt input size > 16 bytes stp x19,x20,[sp,#-64]! stp x21,x22,[sp,#48] stp d8,d9,[sp,#32] stp d10,d11,[sp,#16] // tailcnt store the tail value of length%16. and x21,x2,#0xf and x2,x2,#-16 // len &= 0x1..110000, now divisible by 16 subs x2,x2,#16 mov x8,#16 b.lo .Lxts_abort // if !(len > 16): error csel x8,xzr,x8,eq // if (len == 16): step = 0 // Firstly, encrypt the iv with key2, as the first iv of XEX. ldr w6,[x4,#240] ld1 {v0.4s},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.4s},[x4],#16 .Loop_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt .Loop_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b // The iv for second block // x9- iv(low), x10 - iv(high) // the five ivs stored into, v6.16b,v8.16b,v9.16b,v10.16b,v11.16b fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d8,x9 fmov v8.d[1],x10 ldr w5,[x3,#240] // next starting point ld1 {v0.16b},[x0],x8 ld1 {v16.4s,v17.4s},[x3] // load key schedule... 
sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 // Encryption .Lxts_enc: ld1 {v24.16b},[x0],#16 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v28.16b,v0.16b,v0.16b orr v27.16b,v24.16b,v24.16b orr v29.16b,v24.16b,v24.16b b.lo .Lxts_inner_enc_tail // when input size % 5 = 1 or 2 // (with tail or not) eor v0.16b,v0.16b,v6.16b // before encryption, xor with iv eor v24.16b,v24.16b,v8.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d9,x9 fmov v9.d[1],x10 orr v1.16b,v24.16b,v24.16b ld1 {v24.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b eor v27.16b,v24.16b,v9.16b // the third block eor v24.16b,v24.16b,v9.16b cmp x2,#32 b.lo .Lxts_outer_enc_tail // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d10,x9 fmov v10.d[1],x10 ld1 {v25.16b},[x0],#16 // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v26.16b},[x0],#16 eor v25.16b,v25.16b,v10.16b // the fourth block eor v26.16b,v26.16b,v11.16b sub x2,x2,#32 // bias mov w6,w5 b .Loop5x_xts_enc .align 4 .Loop5x_xts_enc: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v25.16b,v16.16b aesmc v25.16b,v25.16b aese v26.16b,v16.16b aesmc v26.16b,v26.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v25.16b,v17.16b aesmc v25.16b,v25.16b aese v26.16b,v17.16b aesmc v26.16b,v26.16b ld1 {v17.4s},[x7],#16 b.gt .Loop5x_xts_enc aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v25.16b,v16.16b aesmc v25.16b,v25.16b aese v26.16b,v16.16b aesmc v26.16b,v26.16b subs x2,x2,#0x50 // because .Lxts_enc_tail4x aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v25.16b,v17.16b aesmc v25.16b,v25.16b aese v26.16b,v17.16b aesmc v26.16b,v26.16b csel x6,xzr,x2,gt // borrow x6, w6, "gt" is not typo mov x7,x3 aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v1.16b,v18.16b aesmc v1.16b,v1.16b aese v24.16b,v18.16b aesmc v24.16b,v24.16b aese v25.16b,v18.16b aesmc v25.16b,v25.16b aese v26.16b,v18.16b aesmc v26.16b,v26.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v26.16b // are loaded with last "words" add x6,x2,#0x60 // because .Lxts_enc_tail4x aese v0.16b,v19.16b aesmc v0.16b,v0.16b aese v1.16b,v19.16b aesmc v1.16b,v1.16b aese v24.16b,v19.16b aesmc v24.16b,v24.16b aese v25.16b,v19.16b aesmc v25.16b,v25.16b aese v26.16b,v19.16b aesmc v26.16b,v26.16b aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b aese v25.16b,v20.16b aesmc v25.16b,v25.16b aese v26.16b,v20.16b aesmc v26.16b,v26.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b aese v25.16b,v21.16b aesmc v25.16b,v25.16b aese v26.16b,v21.16b aesmc v26.16b,v26.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc 
v24.16b,v24.16b aese v25.16b,v22.16b aesmc v25.16b,v25.16b aese v26.16b,v22.16b aesmc v26.16b,v26.16b eor v4.16b,v7.16b,v6.16b aese v0.16b,v23.16b // The iv for first block of one iteration extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v7.16b,v8.16b ld1 {v2.16b},[x0],#16 aese v1.16b,v23.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d8,x9 fmov v8.d[1],x10 eor v17.16b,v7.16b,v9.16b ld1 {v3.16b},[x0],#16 aese v24.16b,v23.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d9,x9 fmov v9.d[1],x10 eor v30.16b,v7.16b,v10.16b ld1 {v27.16b},[x0],#16 aese v25.16b,v23.16b // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d10,x9 fmov v10.d[1],x10 eor v31.16b,v7.16b,v11.16b ld1 {v28.16b},[x0],#16 aese v26.16b,v23.16b // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v29.16b},[x0],#16 cbz x6,.Lxts_enc_tail4x ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v0.16b,v2.16b,v6.16b eor v5.16b,v5.16b,v1.16b eor v1.16b,v3.16b,v8.16b eor v17.16b,v17.16b,v24.16b eor v24.16b,v27.16b,v9.16b eor v30.16b,v30.16b,v25.16b eor v25.16b,v28.16b,v10.16b eor v31.16b,v31.16b,v26.16b st1 {v4.16b},[x1],#16 eor v26.16b,v29.16b,v11.16b st1 {v5.16b},[x1],#16 mov w6,w5 st1 {v17.16b},[x1],#16 ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v30.16b},[x1],#16 st1 {v31.16b},[x1],#16 b.hs .Loop5x_xts_enc // If left 4 blocks, borrow the five block's processing. // This means if (x2 + 1 block) == 0, which is the case // when input size % 5 = 4, continue processing and do // another iteration in Loop5x_xts_enc which will exit from // cbz x6,.Lxts_enc_tail4x. 
// Otherwise, this is the end of the loop continue processing // 0, 1, 2 or 3 blocks (with or without tail) starting at // Loop5x_enc_after cmn x2,#0x10 b.ne .Loop5x_enc_after orr v11.16b,v10.16b,v10.16b orr v10.16b,v9.16b,v9.16b orr v9.16b,v8.16b,v8.16b orr v8.16b,v6.16b,v6.16b fmov x9,d11 fmov x10,v11.d[1] eor v0.16b,v6.16b,v2.16b eor v1.16b,v8.16b,v3.16b eor v24.16b,v27.16b,v9.16b eor v25.16b,v28.16b,v10.16b eor v26.16b,v29.16b,v11.16b b.eq .Loop5x_xts_enc .Loop5x_enc_after: add x2,x2,#0x50 cbz x2,.Lxts_enc_done // no blocks left add w6,w5,#2 subs x2,x2,#0x30 b.lo .Lxts_inner_enc_tail // 1 or 2 blocks left // (with tail or not) eor v0.16b,v6.16b,v27.16b // 3 blocks left eor v1.16b,v8.16b,v28.16b eor v24.16b,v29.16b,v9.16b b .Lxts_outer_enc_tail .align 4 .Lxts_enc_tail4x: add x0,x0,#16 eor v5.16b,v1.16b,v5.16b st1 {v5.16b},[x1],#16 eor v17.16b,v24.16b,v17.16b st1 {v17.16b},[x1],#16 eor v30.16b,v25.16b,v30.16b eor v31.16b,v26.16b,v31.16b st1 {v30.16b,v31.16b},[x1],#32 b .Lxts_enc_done .align 4 .Lxts_outer_enc_tail: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt .Lxts_outer_enc_tail aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 // The iv for first block fmov x9,d9 fmov x10,v9.d[1] //mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v8.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b eor v17.16b,v9.16b,v7.16b add x6,x6,#0x20 add x0,x0,x6 mov x7,x3 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc v24.16b,v24.16b aese v0.16b,v23.16b aese v1.16b,v23.16b aese v24.16b,v23.16b ld1 {v27.16b},[x0],#16 add w6,w5,#2 ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v24.16b,v24.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 st1 {v5.16b},[x1],#16 st1 {v24.16b},[x1],#16 cmn x2,#0x30 b.eq .Lxts_enc_done .Lxts_encxor_one: orr v28.16b,v3.16b,v3.16b orr v29.16b,v27.16b,v27.16b nop .Lxts_inner_enc_tail: cmn x2,#0x10 eor v1.16b,v28.16b,v6.16b eor v24.16b,v29.16b,v8.16b b.eq .Lxts_enc_tail_loop eor v24.16b,v29.16b,v6.16b .Lxts_enc_tail_loop: aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt .Lxts_enc_tail_loop aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b cmn x2,#0x20 aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b eor 
v5.16b,v6.16b,v7.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc v24.16b,v24.16b eor v17.16b,v8.16b,v7.16b aese v1.16b,v23.16b aese v24.16b,v23.16b b.eq .Lxts_enc_one eor v5.16b,v5.16b,v1.16b st1 {v5.16b},[x1],#16 eor v17.16b,v17.16b,v24.16b orr v6.16b,v8.16b,v8.16b st1 {v17.16b},[x1],#16 fmov x9,d8 fmov x10,v8.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 b .Lxts_enc_done .Lxts_enc_one: eor v5.16b,v5.16b,v24.16b orr v6.16b,v6.16b,v6.16b st1 {v5.16b},[x1],#16 fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 b .Lxts_enc_done .align 5 .Lxts_enc_done: // Process the tail block with cipher stealing. tst x21,#0xf b.eq .Lxts_abort mov x20,x0 mov x13,x1 sub x1,x1,#16 .composite_enc_loop: subs x21,x21,#1 ldrb w15,[x1,x21] ldrb w14,[x20,x21] strb w15,[x13,x21] strb w14,[x1,x21] b.gt .composite_enc_loop .Lxts_enc_load_done: ld1 {v26.16b},[x1] eor v26.16b,v26.16b,v6.16b // Encrypt the composite block to get the last second encrypted text block ldr w6,[x3,#240] // load key schedule... ld1 {v0.16b},[x3],#16 sub w6,w6,#2 ld1 {v1.16b},[x3],#16 // load key schedule... .Loop_final_enc: aese v26.16b,v0.16b aesmc v26.16b,v26.16b ld1 {v0.4s},[x3],#16 subs w6,w6,#2 aese v26.16b,v1.16b aesmc v26.16b,v26.16b ld1 {v1.4s},[x3],#16 b.gt .Loop_final_enc aese v26.16b,v0.16b aesmc v26.16b,v26.16b ld1 {v0.4s},[x3] aese v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v6.16b st1 {v26.16b},[x1] .Lxts_abort: ldp x21,x22,[sp,#48] ldp d8,d9,[sp,#32] ldp d10,d11,[sp,#16] ldp x19,x20,[sp],#64 .Lxts_enc_final_abort: ret .size aes_hw_xts_encrypt,.-aes_hw_xts_encrypt .globl aes_hw_xts_decrypt .hidden aes_hw_xts_decrypt .type aes_hw_xts_decrypt,%function .align 5 aes_hw_xts_decrypt: AARCH64_VALID_CALL_TARGET cmp x2,#16 // Original input data size bigger than 16, jump to big size processing. b.ne .Lxts_dec_big_size // Encrypt the iv with key2, as the first XEX iv. ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 .Loop_dec_small_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt .Loop_dec_small_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b ld1 {v0.16b},[x0] eor v0.16b,v6.16b,v0.16b ldr w6,[x3,#240] ld1 {v28.4s,v29.4s},[x3],#32 // load key schedule... aesd v0.16b,v28.16b aesimc v0.16b,v0.16b ld1 {v16.4s,v17.4s},[x3],#32 // load key schedule... aesd v0.16b,v29.16b aesimc v0.16b,v0.16b subs w6,w6,#10 // bias b.eq .Lxts_128_dec .Lxts_dec_round_loop: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b ld1 {v16.4s},[x3],#16 // load key schedule... aesd v0.16b,v17.16b aesimc v0.16b,v0.16b ld1 {v17.4s},[x3],#16 // load key schedule... subs w6,w6,#2 // bias b.gt .Lxts_dec_round_loop .Lxts_128_dec: ld1 {v18.4s,v19.4s},[x3],#32 // load key schedule... aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v0.16b,v17.16b aesimc v0.16b,v0.16b ld1 {v20.4s,v21.4s},[x3],#32 // load key schedule... aesd v0.16b,v18.16b aesimc v0.16b,v0.16b aesd v0.16b,v19.16b aesimc v0.16b,v0.16b ld1 {v22.4s,v23.4s},[x3],#32 // load key schedule... 
aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b ld1 {v7.4s},[x3] aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v0.16b,v23.16b eor v0.16b,v0.16b,v7.16b eor v0.16b,v6.16b,v0.16b st1 {v0.16b},[x1] b .Lxts_dec_final_abort .Lxts_dec_big_size: stp x19,x20,[sp,#-64]! stp x21,x22,[sp,#48] stp d8,d9,[sp,#32] stp d10,d11,[sp,#16] and x21,x2,#0xf and x2,x2,#-16 subs x2,x2,#16 mov x8,#16 b.lo .Lxts_dec_abort // Encrypt the iv with key2, as the first XEX iv ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 .Loop_dec_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt .Loop_dec_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b // The iv for second block // x9- iv(low), x10 - iv(high) // the five ivs stored into, v6.16b,v8.16b,v9.16b,v10.16b,v11.16b fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 ldr w5,[x3,#240] // load rounds number // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 ld1 {v16.4s,v17.4s},[x3] // load key schedule... sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 // load key schedule... ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d10,x9 fmov v10.d[1],x10 add x7,x3,#32 mov w6,w5 b .Lxts_dec // Decryption .align 5 .Lxts_dec: tst x21,#0xf b.eq .Lxts_dec_begin subs x2,x2,#16 csel x8,xzr,x8,eq ld1 {v0.16b},[x0],#16 b.lo .Lxts_done sub x0,x0,#16 .Lxts_dec_begin: ld1 {v0.16b},[x0],x8 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v28.16b,v0.16b,v0.16b ld1 {v24.16b},[x0],#16 orr v27.16b,v24.16b,v24.16b orr v29.16b,v24.16b,v24.16b b.lo .Lxts_inner_dec_tail eor v0.16b,v0.16b,v6.16b // before decryt, xor with iv eor v24.16b,v24.16b,v8.16b orr v1.16b,v24.16b,v24.16b ld1 {v24.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b eor v27.16b,v24.16b,v9.16b // third block xox with third iv eor v24.16b,v24.16b,v9.16b cmp x2,#32 b.lo .Lxts_outer_dec_tail ld1 {v25.16b},[x0],#16 // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v26.16b},[x0],#16 eor v25.16b,v25.16b,v10.16b // the fourth block eor v26.16b,v26.16b,v11.16b sub x2,x2,#32 // bias mov w6,w5 b .Loop5x_xts_dec .align 4 .Loop5x_xts_dec: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v25.16b,v16.16b aesimc v25.16b,v25.16b aesd v26.16b,v16.16b aesimc v26.16b,v26.16b ld1 {v16.4s},[x7],#16 // load key schedule... subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v25.16b,v17.16b aesimc v25.16b,v25.16b aesd v26.16b,v17.16b aesimc v26.16b,v26.16b ld1 {v17.4s},[x7],#16 // load key schedule... 
b.gt .Loop5x_xts_dec aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v25.16b,v16.16b aesimc v25.16b,v25.16b aesd v26.16b,v16.16b aesimc v26.16b,v26.16b subs x2,x2,#0x50 // because .Lxts_dec_tail4x aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v25.16b,v17.16b aesimc v25.16b,v25.16b aesd v26.16b,v17.16b aesimc v26.16b,v26.16b csel x6,xzr,x2,gt // borrow x6, w6, "gt" is not typo mov x7,x3 aesd v0.16b,v18.16b aesimc v0.16b,v0.16b aesd v1.16b,v18.16b aesimc v1.16b,v1.16b aesd v24.16b,v18.16b aesimc v24.16b,v24.16b aesd v25.16b,v18.16b aesimc v25.16b,v25.16b aesd v26.16b,v18.16b aesimc v26.16b,v26.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v26.16b // are loaded with last "words" add x6,x2,#0x60 // because .Lxts_dec_tail4x aesd v0.16b,v19.16b aesimc v0.16b,v0.16b aesd v1.16b,v19.16b aesimc v1.16b,v1.16b aesd v24.16b,v19.16b aesimc v24.16b,v24.16b aesd v25.16b,v19.16b aesimc v25.16b,v25.16b aesd v26.16b,v19.16b aesimc v26.16b,v26.16b aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b aesd v25.16b,v20.16b aesimc v25.16b,v25.16b aesd v26.16b,v20.16b aesimc v26.16b,v26.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b aesd v25.16b,v21.16b aesimc v25.16b,v25.16b aesd v26.16b,v21.16b aesimc v26.16b,v26.16b aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b aesd v25.16b,v22.16b aesimc v25.16b,v25.16b aesd v26.16b,v22.16b aesimc v26.16b,v26.16b eor v4.16b,v7.16b,v6.16b aesd v0.16b,v23.16b // The iv for first block of next iteration. extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v7.16b,v8.16b ld1 {v2.16b},[x0],#16 aesd v1.16b,v23.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 eor v17.16b,v7.16b,v9.16b ld1 {v3.16b},[x0],#16 aesd v24.16b,v23.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 eor v30.16b,v7.16b,v10.16b ld1 {v27.16b},[x0],#16 aesd v25.16b,v23.16b // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d10,x9 fmov v10.d[1],x10 eor v31.16b,v7.16b,v11.16b ld1 {v28.16b},[x0],#16 aesd v26.16b,v23.16b // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v29.16b},[x0],#16 cbz x6,.Lxts_dec_tail4x ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v0.16b,v2.16b,v6.16b eor v5.16b,v5.16b,v1.16b eor v1.16b,v3.16b,v8.16b eor v17.16b,v17.16b,v24.16b eor v24.16b,v27.16b,v9.16b eor v30.16b,v30.16b,v25.16b eor v25.16b,v28.16b,v10.16b eor v31.16b,v31.16b,v26.16b st1 {v4.16b},[x1],#16 eor v26.16b,v29.16b,v11.16b st1 {v5.16b},[x1],#16 mov w6,w5 st1 {v17.16b},[x1],#16 ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v30.16b},[x1],#16 st1 {v31.16b},[x1],#16 b.hs .Loop5x_xts_dec cmn x2,#0x10 b.ne .Loop5x_dec_after // If x2(x2) equal to -0x10, the left blocks is 4. // After specially processing, utilize the five blocks processing again. 
// It will use the following IVs: v6.16b,v6.16b,v8.16b,v9.16b,v10.16b. orr v11.16b,v10.16b,v10.16b orr v10.16b,v9.16b,v9.16b orr v9.16b,v8.16b,v8.16b orr v8.16b,v6.16b,v6.16b fmov x9,d11 fmov x10,v11.d[1] eor v0.16b,v6.16b,v2.16b eor v1.16b,v8.16b,v3.16b eor v24.16b,v27.16b,v9.16b eor v25.16b,v28.16b,v10.16b eor v26.16b,v29.16b,v11.16b b.eq .Loop5x_xts_dec .Loop5x_dec_after: add x2,x2,#0x50 cbz x2,.Lxts_done add w6,w5,#2 subs x2,x2,#0x30 b.lo .Lxts_inner_dec_tail eor v0.16b,v6.16b,v27.16b eor v1.16b,v8.16b,v28.16b eor v24.16b,v29.16b,v9.16b b .Lxts_outer_dec_tail .align 4 .Lxts_dec_tail4x: add x0,x0,#16 tst x21,#0xf eor v5.16b,v1.16b,v4.16b st1 {v5.16b},[x1],#16 eor v17.16b,v24.16b,v17.16b st1 {v17.16b},[x1],#16 eor v30.16b,v25.16b,v30.16b eor v31.16b,v26.16b,v31.16b st1 {v30.16b,v31.16b},[x1],#32 b.eq .Lxts_dec_abort ld1 {v0.4s},[x0],#16 b .Lxts_done .align 4 .Lxts_outer_dec_tail: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt .Lxts_outer_dec_tail aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 // The iv for first block fmov x9,d9 fmov x10,v9.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v8.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b eor v17.16b,v9.16b,v7.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 add x6,x6,#0x20 add x0,x0,x6 // x0 is adjusted to the last data mov x7,x3 // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b ld1 {v27.16b},[x0],#16 aesd v0.16b,v23.16b aesd v1.16b,v23.16b aesd v24.16b,v23.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] add w6,w5,#2 eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v24.16b,v24.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 st1 {v5.16b},[x1],#16 st1 {v24.16b},[x1],#16 cmn x2,#0x30 add x2,x2,#0x30 b.eq .Lxts_done sub x2,x2,#0x30 orr v28.16b,v3.16b,v3.16b orr v29.16b,v27.16b,v27.16b nop .Lxts_inner_dec_tail: // x2 == -0x10 means two blocks left. 
cmn x2,#0x10 eor v1.16b,v28.16b,v6.16b eor v24.16b,v29.16b,v8.16b b.eq .Lxts_dec_tail_loop eor v24.16b,v29.16b,v6.16b .Lxts_dec_tail_loop: aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt .Lxts_dec_tail_loop aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b cmn x2,#0x20 aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b eor v5.16b,v6.16b,v7.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b eor v17.16b,v8.16b,v7.16b aesd v1.16b,v23.16b aesd v24.16b,v23.16b b.eq .Lxts_dec_one eor v5.16b,v5.16b,v1.16b eor v17.16b,v17.16b,v24.16b orr v6.16b,v9.16b,v9.16b orr v8.16b,v10.16b,v10.16b st1 {v5.16b},[x1],#16 st1 {v17.16b},[x1],#16 add x2,x2,#16 b .Lxts_done .Lxts_dec_one: eor v5.16b,v5.16b,v24.16b orr v6.16b,v8.16b,v8.16b orr v8.16b,v9.16b,v9.16b st1 {v5.16b},[x1],#16 add x2,x2,#32 .Lxts_done: tst x21,#0xf b.eq .Lxts_dec_abort // Processing the last two blocks with cipher stealing. mov x7,x3 cbnz x2,.Lxts_dec_1st_done ld1 {v0.4s},[x0],#16 // Decrypt the last secod block to get the last plain text block .Lxts_dec_1st_done: eor v26.16b,v0.16b,v8.16b ldr w6,[x3,#240] ld1 {v0.4s},[x3],#16 sub w6,w6,#2 ld1 {v1.4s},[x3],#16 .Loop_final_2nd_dec: aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x3],#16 // load key schedule... subs w6,w6,#2 aesd v26.16b,v1.16b aesimc v26.16b,v26.16b ld1 {v1.4s},[x3],#16 // load key schedule... b.gt .Loop_final_2nd_dec aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x3] aesd v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v8.16b st1 {v26.16b},[x1] mov x20,x0 add x13,x1,#16 // Composite the tailcnt "16 byte not aligned block" into the last second plain blocks // to get the last encrypted block. .composite_dec_loop: subs x21,x21,#1 ldrb w15,[x1,x21] ldrb w14,[x20,x21] strb w15,[x13,x21] strb w14,[x1,x21] b.gt .composite_dec_loop .Lxts_dec_load_done: ld1 {v26.16b},[x1] eor v26.16b,v26.16b,v6.16b // Decrypt the composite block to get the last second plain text block ldr w6,[x7,#240] ld1 {v0.16b},[x7],#16 sub w6,w6,#2 ld1 {v1.16b},[x7],#16 .Loop_final_dec: aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x7],#16 // load key schedule... subs w6,w6,#2 aesd v26.16b,v1.16b aesimc v26.16b,v26.16b ld1 {v1.4s},[x7],#16 // load key schedule... b.gt .Loop_final_dec aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x7] aesd v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v6.16b st1 {v26.16b},[x1] .Lxts_dec_abort: ldp x21,x22,[sp,#48] ldp d8,d9,[sp,#32] ldp d10,d11,[sp,#16] ldp x19,x20,[sp],#64 .Lxts_dec_final_abort: ret .size aes_hw_xts_decrypt,.-aes_hw_xts_decrypt #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
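Note on the XTS tail above: the mov w19,#0x87 constant and the repeated extr/and/eor sequences flagged "The iv for ... block" compute the next XTS tweak by doubling the current 128-bit tweak in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1. The following is a minimal C sketch only, not part of aws-lc; the function name is illustrative, and it assumes the low d lane of the tweak register holds the low 64 bits, matching the fmov d6,x9 / fmov v6.d[1],x10 pairs in the assembly.

#include <stdint.h>

/* Illustrative sketch: double an XTS tweak in GF(2^128).                  */
/* tweak[0] is the low 64 bits and tweak[1] the high 64 bits, matching the */
/* x9/x10 split used by the assembly above.                                */
static void xts_double_tweak(uint64_t tweak[2]) {
    uint64_t carry = tweak[1] >> 63;                 /* bit shifted out of the top word        */
    tweak[1] = (tweak[1] << 1) | (tweak[0] >> 63);   /* high word takes the low word's top bit */
    tweak[0] = (tweak[0] << 1) ^ (carry ? 0x87 : 0); /* fold x^128 back in as 0x87             */
}

The .composite_dec_loop and the two .Loop_final_* passes then implement the ciphertext stealing described in the comments: the unaligned tail is recombined with the preceding block before the final decryptions, so no padding is needed for inputs that are not a multiple of 16 bytes.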
marvin-hansen/iggy-streaming-system
34,363
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/sha256-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) // Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved. // // Licensed under the OpenSSL license (the "License"). You may not use // this file except in compliance with the License. You can obtain a copy // in the file LICENSE in the source distribution or at // https://www.openssl.org/source/license.html // ==================================================================== // Written by Andy Polyakov <appro@openssl.org> for the OpenSSL // project. The module is, however, dual licensed under OpenSSL and // CRYPTOGAMS licenses depending on where you obtain it. For further // details see http://www.openssl.org/~appro/cryptogams/. // // Permission to use under GPLv2 terms is granted. // ==================================================================== // // SHA256/512 for ARMv8. // // Performance in cycles per processed byte and improvement coefficient // over code generated with "default" compiler: // // SHA256-hw SHA256(*) SHA512 // Apple A7 1.97 10.5 (+33%) 6.73 (-1%(**)) // Cortex-A53 2.38 15.5 (+115%) 10.0 (+150%(***)) // Cortex-A57 2.31 11.6 (+86%) 7.51 (+260%(***)) // Denver 2.01 10.5 (+26%) 6.70 (+8%) // X-Gene 20.0 (+100%) 12.8 (+300%(***)) // Mongoose 2.36 13.0 (+50%) 8.36 (+33%) // Kryo 1.92 17.4 (+30%) 11.2 (+8%) // // (*) Software SHA256 results are of lesser relevance, presented // mostly for informational purposes. // (**) The result is a trade-off: it's possible to improve it by // 10% (or by 1 cycle per round), but at the cost of 20% loss // on Cortex-A53 (or by 4 cycles per round). // (***) Super-impressive coefficients over gcc-generated code are // indication of some compiler "pathology", most notably code // generated with -mgeneral-regs-only is significantly faster // and the gap is only 40-90%. #ifndef __KERNEL__ # include <openssl/arm_arch.h> #endif .text .globl sha256_block_data_order_nohw .hidden sha256_block_data_order_nohw .type sha256_block_data_order_nohw,%function .align 6 sha256_block_data_order_nohw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#4*4 ldp w20,w21,[x0] // load context ldp w22,w23,[x0,#2*4] ldp w24,w25,[x0,#4*4] add x2,x1,x2,lsl#6 // end of input ldp w26,w27,[x0,#6*4] adrp x30,.LK256 add x30,x30,:lo12:.LK256 stp x0,x2,[x29,#96] .Loop: ldp w3,w4,[x1],#2*4 ldr w19,[x30],#4 // *K++ eor w28,w21,w22 // magic seed str x1,[x29,#112] #ifndef __AARCH64EB__ rev w3,w3 // 0 #endif ror w16,w24,#6 add w27,w27,w19 // h+=K[i] eor w6,w24,w24,ror#14 and w17,w25,w24 bic w19,w26,w24 add w27,w27,w3 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w6,ror#11 // Sigma1(e) ror w6,w20,#2 add w27,w27,w17 // h+=Ch(e,f,g) eor w17,w20,w20,ror#9 add w27,w27,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w23,w23,w27 // d+=h eor w28,w28,w21 // Maj(a,b,c) eor w17,w6,w17,ror#13 // Sigma0(a) add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w27,w27,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w4,w4 // 1 #endif ldp w5,w6,[x1],#2*4 add w27,w27,w17 // h+=Sigma0(a) ror w16,w23,#6 add w26,w26,w28 // h+=K[i] eor w7,w23,w23,ror#14 and w17,w24,w23 bic w28,w25,w23 add w26,w26,w4 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w7,ror#11 // Sigma1(e) ror w7,w27,#2 add w26,w26,w17 // h+=Ch(e,f,g) eor w17,w27,w27,ror#9 add w26,w26,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w22,w22,w26 // d+=h eor w19,w19,w20 // Maj(a,b,c) eor w17,w7,w17,ror#13 // Sigma0(a) add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w26,w26,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w5,w5 // 2 #endif add w26,w26,w17 // h+=Sigma0(a) ror w16,w22,#6 add w25,w25,w19 // h+=K[i] eor w8,w22,w22,ror#14 and w17,w23,w22 bic w19,w24,w22 add w25,w25,w5 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w8,ror#11 // Sigma1(e) ror w8,w26,#2 add w25,w25,w17 // h+=Ch(e,f,g) eor w17,w26,w26,ror#9 add w25,w25,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w21,w21,w25 // d+=h eor w28,w28,w27 // Maj(a,b,c) eor w17,w8,w17,ror#13 // Sigma0(a) add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w25,w25,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w6,w6 // 3 #endif ldp w7,w8,[x1],#2*4 add w25,w25,w17 // h+=Sigma0(a) ror w16,w21,#6 add w24,w24,w28 // h+=K[i] eor w9,w21,w21,ror#14 and w17,w22,w21 bic w28,w23,w21 add w24,w24,w6 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w9,ror#11 // Sigma1(e) ror w9,w25,#2 add w24,w24,w17 // h+=Ch(e,f,g) eor w17,w25,w25,ror#9 add w24,w24,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w20,w20,w24 // d+=h eor w19,w19,w26 // Maj(a,b,c) eor w17,w9,w17,ror#13 // Sigma0(a) add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w24,w24,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w7,w7 // 4 #endif add w24,w24,w17 // h+=Sigma0(a) ror w16,w20,#6 add w23,w23,w19 // h+=K[i] eor w10,w20,w20,ror#14 and w17,w21,w20 bic w19,w22,w20 add w23,w23,w7 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w10,ror#11 // Sigma1(e) ror w10,w24,#2 add w23,w23,w17 // h+=Ch(e,f,g) eor w17,w24,w24,ror#9 add w23,w23,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w27,w27,w23 // d+=h eor w28,w28,w25 // Maj(a,b,c) eor w17,w10,w17,ror#13 // Sigma0(a) add w23,w23,w28 // h+=Maj(a,b,c) ldr 
w28,[x30],#4 // *K++, w19 in next round //add w23,w23,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w8,w8 // 5 #endif ldp w9,w10,[x1],#2*4 add w23,w23,w17 // h+=Sigma0(a) ror w16,w27,#6 add w22,w22,w28 // h+=K[i] eor w11,w27,w27,ror#14 and w17,w20,w27 bic w28,w21,w27 add w22,w22,w8 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w11,ror#11 // Sigma1(e) ror w11,w23,#2 add w22,w22,w17 // h+=Ch(e,f,g) eor w17,w23,w23,ror#9 add w22,w22,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w26,w26,w22 // d+=h eor w19,w19,w24 // Maj(a,b,c) eor w17,w11,w17,ror#13 // Sigma0(a) add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w22,w22,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w9,w9 // 6 #endif add w22,w22,w17 // h+=Sigma0(a) ror w16,w26,#6 add w21,w21,w19 // h+=K[i] eor w12,w26,w26,ror#14 and w17,w27,w26 bic w19,w20,w26 add w21,w21,w9 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w12,ror#11 // Sigma1(e) ror w12,w22,#2 add w21,w21,w17 // h+=Ch(e,f,g) eor w17,w22,w22,ror#9 add w21,w21,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w25,w25,w21 // d+=h eor w28,w28,w23 // Maj(a,b,c) eor w17,w12,w17,ror#13 // Sigma0(a) add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w21,w21,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w10,w10 // 7 #endif ldp w11,w12,[x1],#2*4 add w21,w21,w17 // h+=Sigma0(a) ror w16,w25,#6 add w20,w20,w28 // h+=K[i] eor w13,w25,w25,ror#14 and w17,w26,w25 bic w28,w27,w25 add w20,w20,w10 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w13,ror#11 // Sigma1(e) ror w13,w21,#2 add w20,w20,w17 // h+=Ch(e,f,g) eor w17,w21,w21,ror#9 add w20,w20,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w24,w24,w20 // d+=h eor w19,w19,w22 // Maj(a,b,c) eor w17,w13,w17,ror#13 // Sigma0(a) add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w20,w20,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w11,w11 // 8 #endif add w20,w20,w17 // h+=Sigma0(a) ror w16,w24,#6 add w27,w27,w19 // h+=K[i] eor w14,w24,w24,ror#14 and w17,w25,w24 bic w19,w26,w24 add w27,w27,w11 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w14,ror#11 // Sigma1(e) ror w14,w20,#2 add w27,w27,w17 // h+=Ch(e,f,g) eor w17,w20,w20,ror#9 add w27,w27,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w23,w23,w27 // d+=h eor w28,w28,w21 // Maj(a,b,c) eor w17,w14,w17,ror#13 // Sigma0(a) add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w27,w27,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w12,w12 // 9 #endif ldp w13,w14,[x1],#2*4 add w27,w27,w17 // h+=Sigma0(a) ror w16,w23,#6 add w26,w26,w28 // h+=K[i] eor w15,w23,w23,ror#14 and w17,w24,w23 bic w28,w25,w23 add w26,w26,w12 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w15,ror#11 // Sigma1(e) ror w15,w27,#2 add w26,w26,w17 // h+=Ch(e,f,g) eor w17,w27,w27,ror#9 add w26,w26,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w22,w22,w26 // d+=h eor w19,w19,w20 // Maj(a,b,c) eor w17,w15,w17,ror#13 // Sigma0(a) add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w26,w26,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w13,w13 // 10 #endif add w26,w26,w17 // h+=Sigma0(a) ror w16,w22,#6 add w25,w25,w19 // h+=K[i] eor w0,w22,w22,ror#14 and w17,w23,w22 bic w19,w24,w22 add w25,w25,w13 // h+=X[i] orr 
w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w0,ror#11 // Sigma1(e) ror w0,w26,#2 add w25,w25,w17 // h+=Ch(e,f,g) eor w17,w26,w26,ror#9 add w25,w25,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w21,w21,w25 // d+=h eor w28,w28,w27 // Maj(a,b,c) eor w17,w0,w17,ror#13 // Sigma0(a) add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w25,w25,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w14,w14 // 11 #endif ldp w15,w0,[x1],#2*4 add w25,w25,w17 // h+=Sigma0(a) str w6,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] eor w6,w21,w21,ror#14 and w17,w22,w21 bic w28,w23,w21 add w24,w24,w14 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w6,ror#11 // Sigma1(e) ror w6,w25,#2 add w24,w24,w17 // h+=Ch(e,f,g) eor w17,w25,w25,ror#9 add w24,w24,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w20,w20,w24 // d+=h eor w19,w19,w26 // Maj(a,b,c) eor w17,w6,w17,ror#13 // Sigma0(a) add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w24,w24,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w15,w15 // 12 #endif add w24,w24,w17 // h+=Sigma0(a) str w7,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] eor w7,w20,w20,ror#14 and w17,w21,w20 bic w19,w22,w20 add w23,w23,w15 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w7,ror#11 // Sigma1(e) ror w7,w24,#2 add w23,w23,w17 // h+=Ch(e,f,g) eor w17,w24,w24,ror#9 add w23,w23,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w27,w27,w23 // d+=h eor w28,w28,w25 // Maj(a,b,c) eor w17,w7,w17,ror#13 // Sigma0(a) add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w23,w23,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w0,w0 // 13 #endif ldp w1,w2,[x1] add w23,w23,w17 // h+=Sigma0(a) str w8,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] eor w8,w27,w27,ror#14 and w17,w20,w27 bic w28,w21,w27 add w22,w22,w0 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w8,ror#11 // Sigma1(e) ror w8,w23,#2 add w22,w22,w17 // h+=Ch(e,f,g) eor w17,w23,w23,ror#9 add w22,w22,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w26,w26,w22 // d+=h eor w19,w19,w24 // Maj(a,b,c) eor w17,w8,w17,ror#13 // Sigma0(a) add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w22,w22,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w1,w1 // 14 #endif ldr w6,[sp,#12] add w22,w22,w17 // h+=Sigma0(a) str w9,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] eor w9,w26,w26,ror#14 and w17,w27,w26 bic w19,w20,w26 add w21,w21,w1 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w9,ror#11 // Sigma1(e) ror w9,w22,#2 add w21,w21,w17 // h+=Ch(e,f,g) eor w17,w22,w22,ror#9 add w21,w21,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w25,w25,w21 // d+=h eor w28,w28,w23 // Maj(a,b,c) eor w17,w9,w17,ror#13 // Sigma0(a) add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w21,w21,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w2,w2 // 15 #endif ldr w7,[sp,#0] add w21,w21,w17 // h+=Sigma0(a) str w10,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w9,w4,#7 and w17,w26,w25 ror w8,w1,#17 bic w28,w27,w25 ror w10,w21,#2 add w20,w20,w2 // h+=X[i] eor w16,w16,w25,ror#11 eor w9,w9,w4,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w10,w10,w21,ror#13 add w20,w20,w17 // 
h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w8,w8,w1,ror#19 eor w9,w9,w4,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w10,w21,ror#22 // Sigma0(a) eor w8,w8,w1,lsr#10 // sigma1(X[i+14]) add w3,w3,w12 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w3,w3,w9 add w20,w20,w17 // h+=Sigma0(a) add w3,w3,w8 .Loop_16_xx: ldr w8,[sp,#4] str w11,[sp,#0] ror w16,w24,#6 add w27,w27,w19 // h+=K[i] ror w10,w5,#7 and w17,w25,w24 ror w9,w2,#17 bic w19,w26,w24 ror w11,w20,#2 add w27,w27,w3 // h+=X[i] eor w16,w16,w24,ror#11 eor w10,w10,w5,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w24,ror#25 // Sigma1(e) eor w11,w11,w20,ror#13 add w27,w27,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w9,w9,w2,ror#19 eor w10,w10,w5,lsr#3 // sigma0(X[i+1]) add w27,w27,w16 // h+=Sigma1(e) eor w28,w28,w21 // Maj(a,b,c) eor w17,w11,w20,ror#22 // Sigma0(a) eor w9,w9,w2,lsr#10 // sigma1(X[i+14]) add w4,w4,w13 add w23,w23,w27 // d+=h add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w4,w4,w10 add w27,w27,w17 // h+=Sigma0(a) add w4,w4,w9 ldr w9,[sp,#8] str w12,[sp,#4] ror w16,w23,#6 add w26,w26,w28 // h+=K[i] ror w11,w6,#7 and w17,w24,w23 ror w10,w3,#17 bic w28,w25,w23 ror w12,w27,#2 add w26,w26,w4 // h+=X[i] eor w16,w16,w23,ror#11 eor w11,w11,w6,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w23,ror#25 // Sigma1(e) eor w12,w12,w27,ror#13 add w26,w26,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w10,w10,w3,ror#19 eor w11,w11,w6,lsr#3 // sigma0(X[i+1]) add w26,w26,w16 // h+=Sigma1(e) eor w19,w19,w20 // Maj(a,b,c) eor w17,w12,w27,ror#22 // Sigma0(a) eor w10,w10,w3,lsr#10 // sigma1(X[i+14]) add w5,w5,w14 add w22,w22,w26 // d+=h add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w5,w5,w11 add w26,w26,w17 // h+=Sigma0(a) add w5,w5,w10 ldr w10,[sp,#12] str w13,[sp,#8] ror w16,w22,#6 add w25,w25,w19 // h+=K[i] ror w12,w7,#7 and w17,w23,w22 ror w11,w4,#17 bic w19,w24,w22 ror w13,w26,#2 add w25,w25,w5 // h+=X[i] eor w16,w16,w22,ror#11 eor w12,w12,w7,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w22,ror#25 // Sigma1(e) eor w13,w13,w26,ror#13 add w25,w25,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w11,w11,w4,ror#19 eor w12,w12,w7,lsr#3 // sigma0(X[i+1]) add w25,w25,w16 // h+=Sigma1(e) eor w28,w28,w27 // Maj(a,b,c) eor w17,w13,w26,ror#22 // Sigma0(a) eor w11,w11,w4,lsr#10 // sigma1(X[i+14]) add w6,w6,w15 add w21,w21,w25 // d+=h add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w6,w6,w12 add w25,w25,w17 // h+=Sigma0(a) add w6,w6,w11 ldr w11,[sp,#0] str w14,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] ror w13,w8,#7 and w17,w22,w21 ror w12,w5,#17 bic w28,w23,w21 ror w14,w25,#2 add w24,w24,w6 // h+=X[i] eor w16,w16,w21,ror#11 eor w13,w13,w8,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w21,ror#25 // Sigma1(e) eor w14,w14,w25,ror#13 add w24,w24,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w12,w12,w5,ror#19 eor w13,w13,w8,lsr#3 // sigma0(X[i+1]) add w24,w24,w16 // h+=Sigma1(e) eor w19,w19,w26 // Maj(a,b,c) eor w17,w14,w25,ror#22 // Sigma0(a) eor w12,w12,w5,lsr#10 // sigma1(X[i+14]) add w7,w7,w0 add w20,w20,w24 // d+=h add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w7,w7,w13 add w24,w24,w17 // 
h+=Sigma0(a) add w7,w7,w12 ldr w12,[sp,#4] str w15,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] ror w14,w9,#7 and w17,w21,w20 ror w13,w6,#17 bic w19,w22,w20 ror w15,w24,#2 add w23,w23,w7 // h+=X[i] eor w16,w16,w20,ror#11 eor w14,w14,w9,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w20,ror#25 // Sigma1(e) eor w15,w15,w24,ror#13 add w23,w23,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w13,w13,w6,ror#19 eor w14,w14,w9,lsr#3 // sigma0(X[i+1]) add w23,w23,w16 // h+=Sigma1(e) eor w28,w28,w25 // Maj(a,b,c) eor w17,w15,w24,ror#22 // Sigma0(a) eor w13,w13,w6,lsr#10 // sigma1(X[i+14]) add w8,w8,w1 add w27,w27,w23 // d+=h add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w8,w8,w14 add w23,w23,w17 // h+=Sigma0(a) add w8,w8,w13 ldr w13,[sp,#8] str w0,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] ror w15,w10,#7 and w17,w20,w27 ror w14,w7,#17 bic w28,w21,w27 ror w0,w23,#2 add w22,w22,w8 // h+=X[i] eor w16,w16,w27,ror#11 eor w15,w15,w10,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w27,ror#25 // Sigma1(e) eor w0,w0,w23,ror#13 add w22,w22,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w14,w14,w7,ror#19 eor w15,w15,w10,lsr#3 // sigma0(X[i+1]) add w22,w22,w16 // h+=Sigma1(e) eor w19,w19,w24 // Maj(a,b,c) eor w17,w0,w23,ror#22 // Sigma0(a) eor w14,w14,w7,lsr#10 // sigma1(X[i+14]) add w9,w9,w2 add w26,w26,w22 // d+=h add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w9,w9,w15 add w22,w22,w17 // h+=Sigma0(a) add w9,w9,w14 ldr w14,[sp,#12] str w1,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] ror w0,w11,#7 and w17,w27,w26 ror w15,w8,#17 bic w19,w20,w26 ror w1,w22,#2 add w21,w21,w9 // h+=X[i] eor w16,w16,w26,ror#11 eor w0,w0,w11,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w26,ror#25 // Sigma1(e) eor w1,w1,w22,ror#13 add w21,w21,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w15,w15,w8,ror#19 eor w0,w0,w11,lsr#3 // sigma0(X[i+1]) add w21,w21,w16 // h+=Sigma1(e) eor w28,w28,w23 // Maj(a,b,c) eor w17,w1,w22,ror#22 // Sigma0(a) eor w15,w15,w8,lsr#10 // sigma1(X[i+14]) add w10,w10,w3 add w25,w25,w21 // d+=h add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w10,w10,w0 add w21,w21,w17 // h+=Sigma0(a) add w10,w10,w15 ldr w15,[sp,#0] str w2,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w1,w12,#7 and w17,w26,w25 ror w0,w9,#17 bic w28,w27,w25 ror w2,w21,#2 add w20,w20,w10 // h+=X[i] eor w16,w16,w25,ror#11 eor w1,w1,w12,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w2,w2,w21,ror#13 add w20,w20,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w0,w0,w9,ror#19 eor w1,w1,w12,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w2,w21,ror#22 // Sigma0(a) eor w0,w0,w9,lsr#10 // sigma1(X[i+14]) add w11,w11,w4 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w11,w11,w1 add w20,w20,w17 // h+=Sigma0(a) add w11,w11,w0 ldr w0,[sp,#4] str w3,[sp,#0] ror w16,w24,#6 add w27,w27,w19 // h+=K[i] ror w2,w13,#7 and w17,w25,w24 ror w1,w10,#17 bic w19,w26,w24 ror w3,w20,#2 add w27,w27,w11 // h+=X[i] eor w16,w16,w24,ror#11 eor w2,w2,w13,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w24,ror#25 // Sigma1(e) eor w3,w3,w20,ror#13 add w27,w27,w17 // h+=Ch(e,f,g) 
and w28,w28,w19 // (b^c)&=(a^b) eor w1,w1,w10,ror#19 eor w2,w2,w13,lsr#3 // sigma0(X[i+1]) add w27,w27,w16 // h+=Sigma1(e) eor w28,w28,w21 // Maj(a,b,c) eor w17,w3,w20,ror#22 // Sigma0(a) eor w1,w1,w10,lsr#10 // sigma1(X[i+14]) add w12,w12,w5 add w23,w23,w27 // d+=h add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w12,w12,w2 add w27,w27,w17 // h+=Sigma0(a) add w12,w12,w1 ldr w1,[sp,#8] str w4,[sp,#4] ror w16,w23,#6 add w26,w26,w28 // h+=K[i] ror w3,w14,#7 and w17,w24,w23 ror w2,w11,#17 bic w28,w25,w23 ror w4,w27,#2 add w26,w26,w12 // h+=X[i] eor w16,w16,w23,ror#11 eor w3,w3,w14,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w23,ror#25 // Sigma1(e) eor w4,w4,w27,ror#13 add w26,w26,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w2,w2,w11,ror#19 eor w3,w3,w14,lsr#3 // sigma0(X[i+1]) add w26,w26,w16 // h+=Sigma1(e) eor w19,w19,w20 // Maj(a,b,c) eor w17,w4,w27,ror#22 // Sigma0(a) eor w2,w2,w11,lsr#10 // sigma1(X[i+14]) add w13,w13,w6 add w22,w22,w26 // d+=h add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w13,w13,w3 add w26,w26,w17 // h+=Sigma0(a) add w13,w13,w2 ldr w2,[sp,#12] str w5,[sp,#8] ror w16,w22,#6 add w25,w25,w19 // h+=K[i] ror w4,w15,#7 and w17,w23,w22 ror w3,w12,#17 bic w19,w24,w22 ror w5,w26,#2 add w25,w25,w13 // h+=X[i] eor w16,w16,w22,ror#11 eor w4,w4,w15,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w22,ror#25 // Sigma1(e) eor w5,w5,w26,ror#13 add w25,w25,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w3,w3,w12,ror#19 eor w4,w4,w15,lsr#3 // sigma0(X[i+1]) add w25,w25,w16 // h+=Sigma1(e) eor w28,w28,w27 // Maj(a,b,c) eor w17,w5,w26,ror#22 // Sigma0(a) eor w3,w3,w12,lsr#10 // sigma1(X[i+14]) add w14,w14,w7 add w21,w21,w25 // d+=h add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w14,w14,w4 add w25,w25,w17 // h+=Sigma0(a) add w14,w14,w3 ldr w3,[sp,#0] str w6,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] ror w5,w0,#7 and w17,w22,w21 ror w4,w13,#17 bic w28,w23,w21 ror w6,w25,#2 add w24,w24,w14 // h+=X[i] eor w16,w16,w21,ror#11 eor w5,w5,w0,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w21,ror#25 // Sigma1(e) eor w6,w6,w25,ror#13 add w24,w24,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w4,w4,w13,ror#19 eor w5,w5,w0,lsr#3 // sigma0(X[i+1]) add w24,w24,w16 // h+=Sigma1(e) eor w19,w19,w26 // Maj(a,b,c) eor w17,w6,w25,ror#22 // Sigma0(a) eor w4,w4,w13,lsr#10 // sigma1(X[i+14]) add w15,w15,w8 add w20,w20,w24 // d+=h add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w15,w15,w5 add w24,w24,w17 // h+=Sigma0(a) add w15,w15,w4 ldr w4,[sp,#4] str w7,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] ror w6,w1,#7 and w17,w21,w20 ror w5,w14,#17 bic w19,w22,w20 ror w7,w24,#2 add w23,w23,w15 // h+=X[i] eor w16,w16,w20,ror#11 eor w6,w6,w1,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w20,ror#25 // Sigma1(e) eor w7,w7,w24,ror#13 add w23,w23,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w5,w5,w14,ror#19 eor w6,w6,w1,lsr#3 // sigma0(X[i+1]) add w23,w23,w16 // h+=Sigma1(e) eor w28,w28,w25 // Maj(a,b,c) eor w17,w7,w24,ror#22 // Sigma0(a) eor w5,w5,w14,lsr#10 // sigma1(X[i+14]) add w0,w0,w9 add w27,w27,w23 // d+=h add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w0,w0,w6 add w23,w23,w17 // h+=Sigma0(a) add w0,w0,w5 ldr w5,[sp,#8] str 
w8,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] ror w7,w2,#7 and w17,w20,w27 ror w6,w15,#17 bic w28,w21,w27 ror w8,w23,#2 add w22,w22,w0 // h+=X[i] eor w16,w16,w27,ror#11 eor w7,w7,w2,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w27,ror#25 // Sigma1(e) eor w8,w8,w23,ror#13 add w22,w22,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w6,w6,w15,ror#19 eor w7,w7,w2,lsr#3 // sigma0(X[i+1]) add w22,w22,w16 // h+=Sigma1(e) eor w19,w19,w24 // Maj(a,b,c) eor w17,w8,w23,ror#22 // Sigma0(a) eor w6,w6,w15,lsr#10 // sigma1(X[i+14]) add w1,w1,w10 add w26,w26,w22 // d+=h add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w1,w1,w7 add w22,w22,w17 // h+=Sigma0(a) add w1,w1,w6 ldr w6,[sp,#12] str w9,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] ror w8,w3,#7 and w17,w27,w26 ror w7,w0,#17 bic w19,w20,w26 ror w9,w22,#2 add w21,w21,w1 // h+=X[i] eor w16,w16,w26,ror#11 eor w8,w8,w3,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w26,ror#25 // Sigma1(e) eor w9,w9,w22,ror#13 add w21,w21,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w7,w7,w0,ror#19 eor w8,w8,w3,lsr#3 // sigma0(X[i+1]) add w21,w21,w16 // h+=Sigma1(e) eor w28,w28,w23 // Maj(a,b,c) eor w17,w9,w22,ror#22 // Sigma0(a) eor w7,w7,w0,lsr#10 // sigma1(X[i+14]) add w2,w2,w11 add w25,w25,w21 // d+=h add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w2,w2,w8 add w21,w21,w17 // h+=Sigma0(a) add w2,w2,w7 ldr w7,[sp,#0] str w10,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w9,w4,#7 and w17,w26,w25 ror w8,w1,#17 bic w28,w27,w25 ror w10,w21,#2 add w20,w20,w2 // h+=X[i] eor w16,w16,w25,ror#11 eor w9,w9,w4,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w10,w10,w21,ror#13 add w20,w20,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w8,w8,w1,ror#19 eor w9,w9,w4,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w10,w21,ror#22 // Sigma0(a) eor w8,w8,w1,lsr#10 // sigma1(X[i+14]) add w3,w3,w12 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w3,w3,w9 add w20,w20,w17 // h+=Sigma0(a) add w3,w3,w8 cbnz w19,.Loop_16_xx ldp x0,x2,[x29,#96] ldr x1,[x29,#112] sub x30,x30,#260 // rewind ldp w3,w4,[x0] ldp w5,w6,[x0,#2*4] add x1,x1,#14*4 // advance input pointer ldp w7,w8,[x0,#4*4] add w20,w20,w3 ldp w9,w10,[x0,#6*4] add w21,w21,w4 add w22,w22,w5 add w23,w23,w6 stp w20,w21,[x0] add w24,w24,w7 add w25,w25,w8 stp w22,w23,[x0,#2*4] add w26,w26,w9 add w27,w27,w10 cmp x1,x2 stp w24,w25,[x0,#4*4] stp w26,w27,[x0,#6*4] b.ne .Loop ldp x19,x20,[x29,#16] add sp,sp,#4*4 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .size sha256_block_data_order_nohw,.-sha256_block_data_order_nohw .section .rodata .align 6 .type .LK256,%object .LK256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 
0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0 //terminator .size .LK256,.-.LK256 .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 .text #ifndef __KERNEL__ .globl sha256_block_data_order_hw .hidden sha256_block_data_order_hw .type sha256_block_data_order_hw,%function .align 6 sha256_block_data_order_hw: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#6] // kFlag_sha256_hw #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 ld1 {v0.4s,v1.4s},[x0] adrp x3,.LK256 add x3,x3,:lo12:.LK256 .Loop_hw: ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 sub x2,x2,#1 ld1 {v16.4s},[x3],#16 rev32 v4.16b,v4.16b rev32 v5.16b,v5.16b rev32 v6.16b,v6.16b rev32 v7.16b,v7.16b orr v18.16b,v0.16b,v0.16b // offload orr v19.16b,v1.16b,v1.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .inst 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e0760c4 //sha256su1 v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .inst 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .inst 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .inst 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .inst 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e0760c4 //sha256su1 v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .inst 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .inst 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .inst 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .inst 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr 
v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e0760c4 //sha256su1 v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .inst 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .inst 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .inst 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .inst 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .inst 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s ld1 {v17.4s},[x3] add v16.4s,v16.4s,v6.4s sub x3,x3,#64*4-16 // rewind orr v2.16b,v0.16b,v0.16b .inst 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .inst 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s add v17.4s,v17.4s,v7.4s orr v2.16b,v0.16b,v0.16b .inst 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .inst 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s add v0.4s,v0.4s,v18.4s add v1.4s,v1.4s,v19.4s cbnz x2,.Loop_hw st1 {v0.4s,v1.4s},[x0] ldr x29,[sp],#16 ret .size sha256_block_data_order_hw,.-sha256_block_data_order_hw #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
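Both SHA-256 paths in this file annotate each register step with the piece of the round function it computes (Sigma1(e), Ch(e,f,g), Maj(a,b,c), and the sigma0/sigma1 message-schedule terms). As a plain-C reference for one round, a hedged sketch only: rotr32, sha256_round and the macro names below are illustrative and are not aws-lc identifiers.

#include <stdint.h>

/* Illustrative reference for a single SHA-256 round; s[] holds the working state a..h. */
static inline uint32_t rotr32(uint32_t x, unsigned n) { return (x >> n) | (x << (32u - n)); }

#define Sigma0(x) (rotr32(x, 2) ^ rotr32(x, 13) ^ rotr32(x, 22))
#define Sigma1(x) (rotr32(x, 6) ^ rotr32(x, 11) ^ rotr32(x, 25))
#define sigma0(x) (rotr32(x, 7) ^ rotr32(x, 18) ^ ((x) >> 3))
#define sigma1(x) (rotr32(x, 17) ^ rotr32(x, 19) ^ ((x) >> 10))
#define Ch(e, f, g)  (((e) & (f)) ^ (~(e) & (g)))
#define Maj(a, b, c) (((a) & (b)) ^ ((a) & (c)) ^ ((b) & (c)))

static void sha256_round(uint32_t s[8], uint32_t Ki, uint32_t Wi) {
    uint32_t T1 = s[7] + Sigma1(s[4]) + Ch(s[4], s[5], s[6]) + Ki + Wi; /* h += Sigma1(e)+Ch(e,f,g)+K[i]+X[i] */
    uint32_t T2 = Sigma0(s[0]) + Maj(s[0], s[1], s[2]);
    s[7] = s[6]; s[6] = s[5]; s[5] = s[4]; s[4] = s[3] + T1;            /* "d+=h" in the comments             */
    s[3] = s[2]; s[2] = s[1]; s[1] = s[0]; s[0] = T1 + T2;              /* "h+=Sigma0(a)" and "h+=Maj(a,b,c)" */
}

In .Loop_16_xx the message schedule is extended in the same registers, X[i] += sigma0(X[i+1]) + X[i+9] + sigma1(X[i+14]) in the comments' indexing, while the hardware path (sha256_block_data_order_hw) replaces all of this with the sha256h/sha256h2/sha256su0/sha256su1 instructions that are manually encoded via .inst.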
marvin-hansen/iggy-streaming-system
286,320
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-unroll8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include "openssl/arm_arch.h" #if __ARM_MAX_ARCH__>=8 .text .arch armv8.2-a+crypto .globl aesv8_gcm_8x_enc_128 .hidden aesv8_gcm_8x_enc_128 .type aesv8_gcm_8x_enc_128,%function .align 4 aesv8_gcm_8x_enc_128: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#7] // kFlag_aesv8_gcm_8x_enc_128 #endif AARCH64_VALID_CALL_TARGET cbz x1, .L128_enc_ret stp d8, d9, [sp, #-80]! lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 sub x5, x5, #1 //byte_len - 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v5.16b, v26.16b 
aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 ldr q27, [x11, #160] //load rk10 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 add x5, x5, x0 aese v0.16b, v26.16b //AES block 8k+8 
- round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge .L128_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext cmp x0, x5 //check if we have <= 8 blocks .inst 0xce006d08 //eor3 v8.16b, v8.16b, v0.16b, v27.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .inst 0xce016d29 //eor3 v9.16b, v9.16b, v1.16b, v27.16b //AES block 1 - result stp q8, q9, [x2], #32 //AES block 0, 1 - store result rev32 v1.16b, v30.16b //CTR block 9 .inst 0xce056dad //eor3 v13.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result add v30.4s, v30.4s, v31.4s //CTR block 9 .inst 0xce026d4a //eor3 v10.16b, v10.16b, v2.16b, v27.16b //AES block 2 - result .inst 0xce066dce //eor3 v14.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result .inst 0xce046d8c //eor3 v12.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .inst 0xce036d6b //eor3 v11.16b, v11.16b, v3.16b, v27.16b //AES block 3 - result .inst 0xce076def //eor3 v15.16b, v15.16b, v7.16b,v27.16b //AES block 7 - result stp q10, q11, [x2], #32 //AES block 2, 3 - store result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 stp q12, q13, [x2], #32 //AES block 4, 5 - store result stp q14, q15, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge .L128_enc_prepretail //do prepretail .L128_enc_main_loop: //main loop start rev32 v5.16b, v30.16b //CTR block 8k+13 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v9.16b, v9.16b //GHASH block 8k+1 rev64 v8.16b, v8.16b //GHASH block 8k ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) rev64 v11.16b, v11.16b //GHASH block 8k+3 ldp q26, q27, [x11, #0] //load rk0, rk1 eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high rev64 v10.16b, v10.16b //GHASH block 8k+2 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h3l | h3h aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high add 
v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b,v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid ldp q28, q26, [x11, #32] //load rk2, rk3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) ldp q27, q28, [x11, #64] //load rk4, rk5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h1l | h1h pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v0.16b, v27.16b aesmc 
v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 ldp q26, q27, [x11, #96] //load rk6, rk7 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid ldp q28, q26, [x11, #128] //load rk8, rk9 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b 
aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 rev32 v22.16b, v30.16b //CTR block 8k+17 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load plaintext add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldr q27, [x11, #160] //load rk10 ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load plaintext rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 cmp x0, x5 //.LOOP CONTROL .inst 0xce046d8c //eor3 v12.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result aese v7.16b, v26.16b //AES block 8k+15 - round 9 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v3.16b, v26.16b //AES block 8k+11 - round 9 .inst 0xce026d4a //eor3 v10.16b, v10.16b, v2.16b, v27.16b //AES block 8k+10 - result mov v2.16b, v23.16b //CTR block 8k+18 aese v0.16b, v26.16b //AES block 8k+8 - round 9 rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 .inst 0xce076def //eor3 v15.16b, v15.16b, v7.16b, v27.16b //AES block 7 - result aese v5.16b, v26.16b //AES block 8k+13 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low .inst 0xce016d29 //eor3 v9.16b, v9.16b, v1.16b, v27.16b //AES block 8k+9 - result .inst 0xce036d6b //eor3 v11.16b, v11.16b, v3.16b, v27.16b //AES block 8k+11 - result mov v3.16b, v25.16b //CTR block 8k+19 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce056dad //eor3 v13.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result mov v1.16b, v22.16b //CTR block 8k+17 .inst 0xce006d08 //eor3 v8.16b, v8.16b, v0.16b, v27.16b //AES block 8k+8 - result mov v0.16b, v20.16b //CTR block 8k+16 stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result .inst 0xce066dce //eor3 v14.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result stp q12, q13, [x2], #32 //AES block 8k+12, 8k+13 - store result .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low stp q14, q15, [x2], #32 //AES block 8k+14, 8k+15 - store result b.lt .L128_enc_main_loop .L128_enc_prepretail: //PREPRETAIL rev32 v5.16b, v30.16b //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | 
h8h ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v8.16b, v8.16b //GHASH block 8k rev64 v9.16b, v9.16b //GHASH block 8k+1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev64 v10.16b, v10.16b //GHASH block 8k+2 eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v6.16b, v30.16b //CTR block 8k+14 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h ldp q28, q26, [x11, #32] //load rk2, rk3 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v5.16b, v28.16b aesmc v5.16b,
v5.16b //AES block 8k+13 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high ldp q26, q27, [x11, #96] //load rk6, rk7 pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 ldr d16, [x10] //MODULO - load modulo constant aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b
//AES block 8k+12 - round 5 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .inst 0xce114a73 //eor3 v19.16b, v19.16b, v17.16b, v18.16b //MODULO - fold into low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 ldr q27, [x11, #160] //load rk10 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 .L128_enc_tail: //TAIL sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldr q8, [x0], #16 //AES block 8k+8 - load plaintext mov v29.16b, v27.16b ldp q20, q21, [x6, #96] //load h5l | h5h .inst 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h 
ldp q24, q25, [x6, #160] //load h8k | h7k cmp x5, #112 b.gt .L128_enc_blocks_more_than_7 mov v7.16b, v6.16b mov v6.16b, v5.16b movi v17.8b, #0 cmp x5, #96 sub v30.4s, v30.4s, v31.4s mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b mov v2.16b, v1.16b movi v19.8b, #0 movi v18.8b, #0 b.gt .L128_enc_blocks_more_than_6 mov v7.16b, v6.16b cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt .L128_enc_blocks_more_than_5 cmp x5, #64 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b b.gt .L128_enc_blocks_more_than_4 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt .L128_enc_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v1.16b cmp x5, #32 ldr q24, [x6, #64] //load h4k | h3k b.gt .L128_enc_blocks_more_than_2 cmp x5, #16 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b b.gt .L128_enc_blocks_more_than_1 ldr q21, [x6, #16] //load h2k | h1k sub v30.4s, v30.4s, v31.4s b .L128_enc_blocks_less_than_1 .L128_enc_blocks_more_than_7: //blocks left > 7 st1 { v9.16b}, [x2], #16 //AES final-7 block - store result rev64 v8.16b, v9.16b //GHASH final-7 block ldr q9, [x0], #16 //AES final-6 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-7 block - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in .inst 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low .L128_enc_blocks_more_than_6: //blocks left > 6 st1 { v9.16b}, [x2], #16 //AES final-6 block - store result rev64 v8.16b, v9.16b //GHASH final-6 block ldr q9, [x0], #16 //AES final-5 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-6 block - mid .inst 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high .L128_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid .inst 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 
block - mid .L128_enc_blocks_more_than_4: //blocks left > 4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block ldr q9, [x0], #16 //AES final-3 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low .inst 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid .L128_enc_blocks_more_than_3: //blocks left > 3 st1 { v9.16b}, [x2], #16 //AES final-3 block - store result ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-3 block - mid ldr q24, [x6, #64] //load h4k | h3k pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low ldr q9, [x0], #16 //AES final-2 block - load plaintext eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low .inst 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high .L128_enc_blocks_more_than_2: //blocks left > 2 st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-1 block - load plaintext ins v27.d[0], v8.d[1] //GHASH final-2 block - mid ldr q23, [x6, #48] //load h3l | h3h movi v16.8b, #0 //supress further partial tag feed in eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid .inst 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low .L128_enc_blocks_more_than_1: //blocks left > 1 st1 { v9.16b}, [x2], #16 //AES final-1 block - store result ldr q22, [x6, #32] //load h2l | h2h rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid .inst 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q21, [x6, #16] //load h2k | h1k ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - 
high eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low .L128_enc_blocks_less_than_1: //blocks left <= 1 rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block mvn x8, xzr //temp1_x = 0xffffffffffffffff cmp x1, #64 csel x13, x8, x7, lt csel x14, x7, xzr, lt mov v0.d[1], x14 mov v0.d[0], x13 //ctr0b is mask for last block and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH final block bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing st1 { v9.16b}, [x2] //store all 16B eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid eor v16.8b, v16.8b, v8.8b //GHASH final block - mid ldr q20, [x6] //load h1l | h1h pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v19.16b, v19.16b, v26.16b //GHASH final block - low ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret .L128_enc_ret: mov w0, #0x0 ret .size aesv8_gcm_8x_enc_128,.-aesv8_gcm_8x_enc_128 .globl aesv8_gcm_8x_dec_128 .hidden aesv8_gcm_8x_dec_128 .type aesv8_gcm_8x_dec_128,%function .align 4 aesv8_gcm_8x_dec_128: AARCH64_VALID_CALL_TARGET cbz x1, .L128_dec_ret stp d8, d9, [sp, #-80]! 
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 ldp q26, q27, [x11, #0] //load rk0, rk1 sub x5, x5, #1 //byte_len - 1 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b rev32 v30.16b, v0.16b //set up reversed counter aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 rev32 v7.16b, v30.16b //CTR block 7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - 
round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 add x5, x5, x0 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v0.16b, v26.16b //AES block 0 - round 9 aese v1.16b, v26.16b //AES block 1 - round 9 aese v6.16b, v26.16b //AES block 6 - round 9 ldr q27, [x11, #160] //load rk10 aese v4.16b, v26.16b //AES block 4 - round 9 aese v3.16b, v26.16b //AES block 3 - round 9 aese v2.16b, v26.16b //AES block 2 - round 9 aese v5.16b, v26.16b //AES block 5 - round 9 aese v7.16b, v26.16b //AES block 7 - round 9 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge .L128_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext .inst 0xce006d00 //eor3 v0.16b, v8.16b, v0.16b, v27.16b //AES block 0 - result .inst 0xce016d21 //eor3 v1.16b, v9.16b, v1.16b, v27.16b //AES block 1 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load 
ciphertext rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext .inst 0xce036d63 //eor3 v3.16b, v11.16b, v3.16b, v27.16b //AES block 3 - result .inst 0xce026d42 //eor3 v2.16b, v10.16b, v2.16b, v27.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .inst 0xce066dc6 //eor3 v6.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .inst 0xce046d84 //eor3 v4.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result .inst 0xce056da5 //eor3 v5.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result stp q4, q5, [x2], #32 //AES block 4, 5 - store result .inst 0xce076de7 //eor3 v7.16b, v15.16b, v7.16b, v27.16b //AES block 7 - result stp q6, q7, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 cmp x0, x5 //check if we have <= 8 blocks add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge .L128_dec_prepretail //do prepretail .L128_dec_main_loop: //main loop start ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev64 v12.16b, v12.16b //GHASH block 8k+4 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v7.16b, v30.16b //CTR block 8k+15 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high ldp q28, q26, [x11, #32] //load rk2, rk3 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low trn2 v10.2d, v11.2d, 
v10.2d //GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 rev64 v15.16b, v15.16b //GHASH block 8k+7 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high ldp q27, q28, [x11, #64] //load rk4, rk5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 
trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 ldr d16, [x10] //MODULO - load modulo constant .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 rev32 v20.16b, v30.16b //CTR block 8k+16 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 rev32 v22.16b, v30.16b //CTR block 8k+17 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 
- round 8 rev32 v23.16b, v30.16b //CTR block 8k+18 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 ldr q27, [x11, #160] //load rk10 aese v6.16b, v26.16b //AES block 8k+14 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 .inst 0xce016d21 //eor3 v1.16b, v9.16b, v1.16b, v27.16b //AES block 8k+9 - result .inst 0xce006d00 //eor3 v0.16b, v8.16b, v0.16b, v27.16b //AES block 8k+8 - result .inst 0xce076de7 //eor3 v7.16b, v15.16b, v7.16b, v27.16b //AES block 8k+15 - result .inst 0xce066dc6 //eor3 v6.16b, v14.16b, v6.16b, v27.16b //AES block 8k+14 - result .inst 0xce026d42 //eor3 v2.16b, v10.16b, v2.16b, v27.16b //AES block 8k+10 - result stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v1.16b, v22.16b //CTR block 8k+17 .inst 0xce046d84 //eor3 v4.16b, v12.16b, v4.16b, v27.16b //AES block 8k+12 - result .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low mov v0.16b, v20.16b //CTR block 8k+16 .inst 0xce036d63 //eor3 v3.16b, v11.16b, v3.16b, v27.16b //AES block 8k+11 - result cmp x0, x5 //.LOOP CONTROL stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result .inst 0xce056da5 //eor3 v5.16b, v13.16b, v5.16b, v27.16b //AES block 8k+13 - result mov v2.16b, v23.16b //CTR block 8k+18 stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 stp q6, q7, [x2], #32 //AES block 8k+14, 8k+15 - store result mov v3.16b, v25.16b //CTR block 8k+19 b.lt .L128_dec_main_loop .L128_dec_prepretail: //PREPRETAIL rev64 v11.16b, v11.16b //GHASH block 8k+3 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v9.16b, v9.16b //GHASH block 8k+1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v13.16b, v13.16b //GHASH block 8k+5 rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v17.1q, v8.2d, v25.2d 
//GHASH block 8k - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low rev32 v7.16b, v30.16b //CTR block 8k+15 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldp q28, q26, [x11, #32] //load rk2, rk3 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid ldp q27, q28, [x11, #64] //load rk4, rk5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 
- round 3 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v6.16b, v26.16b aesmc 
v6.16b, v6.16b //AES block 8k+14 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldr q27, [x11, #160] //load rk10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 .L128_dec_tail: //TAIL mov v29.16b, v27.16b sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process cmp x5, #112 ldp q24, q25, [x6, #160] //load h8k | h7k ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q20, q21, [x6, #96] //load h5l | h5h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h .inst 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result b.gt .L128_dec_blocks_more_than_7 cmp x5, #96 mov v7.16b, v6.16b movi v19.8b, #0 movi v17.8b, #0 mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b mov v2.16b, v1.16b movi v18.8b, #0 sub v30.4s, v30.4s, v31.4s b.gt .L128_dec_blocks_more_than_6 cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt .L128_dec_blocks_more_than_5 cmp x5, #64 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L128_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt .L128_dec_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 ldr q24, [x6, #64] //load h4k | h3k mov v6.16b, v1.16b b.gt 
.L128_dec_blocks_more_than_2 cmp x5, #16 mov v7.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L128_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b .L128_dec_blocks_less_than_1 .L128_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v18.d[0], v24.d[1] //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low ins v27.d[0], v8.d[1] //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-6 block - load ciphertext eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high st1 { v12.16b}, [x2], #16 //AES final-7 block - store result .inst 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid .L128_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-6 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low ldr q9, [x0], #16 //AES final-5 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid st1 { v12.16b}, [x2], #16 //AES final-6 block - store result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid .inst 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result .L128_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block ldr q9, [x0], #16 //AES final-4 block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-5 block - store result eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid .inst 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high .L128_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-3 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high st1 { v12.16b}, [x2], #16 //AES final-4 block - store result eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid .inst 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid 
.L128_dec_blocks_more_than_3: //blocks left > 3 st1 { v12.16b}, [x2], #16 //AES final-3 block - store result rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid ldr q25, [x6, #80] //load h4l | h4h ldr q24, [x6, #64] //load h4k | h3k eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ldr q9, [x0], #16 //AES final-2 block - load ciphertext ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in .inst 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid .L128_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block st1 { v12.16b}, [x2], #16 //AES final-2 block - store result eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q23, [x6, #48] //load h3l | h3h movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-2 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid ldr q9, [x0], #16 //AES final-1 block - load ciphertext eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low .inst 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high .L128_dec_blocks_more_than_1: //blocks left > 1 st1 { v12.16b}, [x2], #16 //AES final-1 block - store result rev64 v8.16b, v9.16b //GHASH final-1 block ldr q22, [x6, #32] //load h2l | h2h eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid ldr q9, [x0], #16 //AES final block - load ciphertext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high ldr q21, [x6, #16] //load h2k | h1k ins v27.d[1], v27.d[0] //GHASH final-1 block - mid .inst 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid .L128_dec_blocks_less_than_1: //blocks left <= 1 and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x13, x8, x7, lt csel x14, x7, xzr, lt mov v0.d[1], x14 mov v0.d[0], x13 //ctr0b is mask for last block ldr q20, [x6] //load h1l | h1h ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b 
//GHASH final block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high ins v16.d[0], v8.d[1] //GHASH final block - mid eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v16.8b, v16.8b, v8.8b //GHASH final block - mid bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid st1 { v12.16b}, [x2] //store all 16B pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant eor v19.16b, v19.16b, v26.16b //GHASH final block - low eor v14.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid ext v17.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment eor v18.16b, v18.16b, v14.16b //MODULO - karatsuba tidy up .inst 0xce115652 //eor3 v18.16b, v18.16b, v17.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce124673 //eor3 v19.16b, v19.16b, v18.16b, v17.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret .L128_dec_ret: mov w0, #0x0 ret .size aesv8_gcm_8x_dec_128,.-aesv8_gcm_8x_dec_128 .globl aesv8_gcm_8x_enc_192 .hidden aesv8_gcm_8x_enc_192 .type aesv8_gcm_8x_enc_192,%function .align 4 aesv8_gcm_8x_enc_192: AARCH64_VALID_CALL_TARGET cbz x1, .L192_enc_ret stp d8, d9, [sp, #-80]! lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 sub x5, x5, #1 //byte_len - 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 add x5, x5, x0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 
1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v1.16b, v27.16b 
aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 14 - round 10 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 11 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 9 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 13 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 12 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 10 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 15 - round 10 aese v6.16b, v28.16b //AES block 14 - round 11 aese v3.16b, v28.16b //AES block 11 - round 11 aese v4.16b, v28.16b //AES block 12 - round 11 aese v7.16b, v28.16b //AES block 15 - round 11 ldr q26, [x11, #192] //load rk12 aese v1.16b, v28.16b //AES block 9 - round 11 aese v5.16b, v28.16b //AES block 13 - round 11 aese v2.16b, v28.16b //AES block 10 - round 11 aese v0.16b, v28.16b //AES block 8 - round 11 b.ge .L192_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext .inst 0xce006908 //eor3 v8.16b, v8.16b, v0.16b, v26.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .inst 0xce03696b //eor3 v11.16b, v11.16b, v3.16b, v26.16b //AES block 3 - result .inst 0xce016929 //eor3 v9.16b, v9.16b, v1.16b, v26.16b //AES block 1 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .inst 0xce04698c //eor3 v12.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result .inst 0xce0569ad //eor3 v13.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result .inst 0xce0769ef //eor3 v15.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result stp q8, q9, [x2], #32 //AES block 0, 1 - store result .inst 0xce02694a //eor3 v10.16b, v10.16b, v2.16b, v26.16b //AES block 2 - result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 stp q10, q11, [x2], #32 //AES block 2, 3 - store result 
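// Main-loop structure for the 8x AES-192-GCM encrypt path that follows: each
// iteration of .L192_enc_main_loop encrypts eight counter blocks (8k+8..8k+15)
// while GHASHing the eight ciphertext blocks produced by the previous iteration,
// accumulating the Karatsuba low/mid/high terms in v19/v18/v17 and reducing them
// with the 0xc200000000000000 constant staged on the stack and addressed via x10.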
cmp x0, x5 //check if we have <= 8 blocks rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .inst 0xce0669ce //eor3 v14.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result stp q12, q13, [x2], #32 //AES block 4, 5 - store result rev32 v4.16b, v30.16b //CTR block 12 stp q14, q15, [x2], #32 //AES block 6, 7 - store result add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge .L192_enc_prepretail //do prepretail .L192_enc_main_loop: //main loop start rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) ldp q26, q27, [x11, #0] //load rk0, rk1 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high pmull v22.1q, v10.1d, v22.1d 
//GHASH block 8k+2 - low aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v26.16b aesmc 
v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 ldr d16, [x10] //MODULO - load modulo constant .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid rev32 v22.16b, v30.16b //CTR block 8k+17 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v7.16b, v26.16b aesmc 
v7.16b, v7.16b //AES block 8k+15 - round 9 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 ldr q26, [x11, #192] //load rk12 ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v4.16b, v28.16b //AES block 8k+12 - round 11 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load plaintext ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load plaintext aese v2.16b, v28.16b //AES block 8k+10 - round 11 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 rev32 v23.16b, v30.16b //CTR block 8k+18 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v5.16b, v28.16b //AES block 8k+13 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v7.16b, v28.16b //AES block 8k+15 - round 11 aese v0.16b, v28.16b //AES block 8k+8 - round 11 .inst 0xce04698c //eor3 v12.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v3.16b, v28.16b //AES block 8k+11 - round 11 aese v1.16b, v28.16b //AES block 8k+9 - round 11 rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 .inst 0xce0769ef //eor3 v15.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result .inst 0xce02694a //eor3 v10.16b, v10.16b, v2.16b, v26.16b //AES block 8k+10 - result .inst 0xce006908 //eor3 v8.16b, v8.16b, v0.16b, v26.16b //AES block 8k+8 - result mov v2.16b, v23.16b //CTR block 8k+18 .inst 0xce016929 //eor3 v9.16b, v9.16b, v1.16b, v26.16b //AES block 8k+9 - result mov v1.16b, v22.16b //CTR block 8k+17 stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce0669ce //eor3 v14.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result mov v0.16b, v20.16b //CTR block 8k+16 rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 .inst 0xce0569ad //eor3 v13.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low .inst 0xce03696b //eor3 v11.16b, v11.16b, v3.16b, v26.16b //AES block 8k+11 - result mov v3.16b, v25.16b //CTR block 8k+19 stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result stp q12, q13, [x2], #32 //AES block 8k+12, 8k+13 - store result cmp x0, x5 //.LOOP CONTROL stp q14, q15, [x2], #32 //AES block 8k+14, 8k+15 - store result b.lt .L192_enc_main_loop .L192_enc_prepretail: //PREPRETAIL rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s 
//CTR block 8k+14 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev64 v11.16b, v11.16b //GHASH block 8k+3 rev64 v10.16b, v10.16b //GHASH block 8k+2 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v9.16b, v9.16b //GHASH block 8k+1 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese 
v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - 
round 6 .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 ldr d16, [x10] //MODULO - load modulo constant aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ldr q26, [x11, #192] //load rk12 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v7.16b, v28.16b //AES block 8k+15 - round 11 
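// AES-192 key schedule usage in this path: rounds 0-10 run as aese+aesmc with
// rk0-rk10, round 11 is a bare aese with rk11, and the final AddRoundKey with rk12
// (loaded from [x11, #192]) is folded into the EOR3 that XORs the round-11 AES
// state, the data block and rk12 together, so no separate final eor with the last
// round key appears in the round sequence.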
aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v3.16b, v28.16b //AES block 8k+11 - round 11 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v2.16b, v28.16b //AES block 8k+10 - round 11 aese v0.16b, v28.16b //AES block 8k+8 - round 11 aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v4.16b, v28.16b //AES block 8k+12 - round 11 aese v5.16b, v28.16b //AES block 8k+13 - round 11 .L192_enc_tail: //TAIL ldp q20, q21, [x6, #96] //load h5l | h5h sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldr q8, [x0], #16 //AES block 8k+8 - l3ad plaintext ldp q24, q25, [x6, #160] //load h8k | h7k mov v29.16b, v26.16b ldp q22, q23, [x6, #128] //load h6l | h6h cmp x5, #112 .inst 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag b.gt .L192_enc_blocks_more_than_7 cmp x5, #96 mov v7.16b, v6.16b movi v17.8b, #0 mov v6.16b, v5.16b movi v19.8b, #0 sub v30.4s, v30.4s, v31.4s mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b mov v2.16b, v1.16b movi v18.8b, #0 b.gt .L192_enc_blocks_more_than_6 mov v7.16b, v6.16b cmp x5, #80 mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L192_enc_blocks_more_than_5 cmp x5, #64 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b b.gt .L192_enc_blocks_more_than_4 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b sub v30.4s, v30.4s, v31.4s cmp x5, #48 b.gt .L192_enc_blocks_more_than_3 mov v7.16b, v6.16b mov v6.16b, v1.16b sub v30.4s, v30.4s, v31.4s ldr q24, [x6, #64] //load h4k | h3k cmp x5, #32 b.gt .L192_enc_blocks_more_than_2 sub v30.4s, v30.4s, v31.4s cmp x5, #16 mov v7.16b, v1.16b b.gt .L192_enc_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b .L192_enc_blocks_less_than_1 .L192_enc_blocks_more_than_7: //blocks left > 7 st1 { v9.16b}, [x2], #16 //AES final-7 block - store result rev64 v8.16b, v9.16b //GHASH final-7 block ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-7 block - mid ldr q9, [x0], #16 //AES final-6 block - load plaintext eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid .inst 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result .L192_enc_blocks_more_than_6: //blocks left > 6 st1 { v9.16b}, [x2], #16 //AES final-6 block - store result rev64 v8.16b, v9.16b //GHASH final-6 block ldr q9, [x0], #16 //AES final-5 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-6 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low .inst 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-6 
block - low eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid .L192_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid .inst 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid .L192_enc_blocks_more_than_4: //blocks left > 4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-3 block - load plaintext pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid .inst 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result .L192_enc_blocks_more_than_3: //blocks left > 3 ldr q24, [x6, #64] //load h4k | h3k st1 { v9.16b}, [x2], #16 //AES final-3 block - store result rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-2 block - load plaintext ldr q25, [x6, #80] //load h4l | h4h ins v27.d[0], v8.d[1] //GHASH final-3 block - mid .inst 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high .L192_enc_blocks_more_than_2: //blocks left > 2 st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-1 block - load plaintext ins v27.d[0], v8.d[1] //GHASH final-2 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b 
//GHASH final-2 block - mid .inst 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result .L192_enc_blocks_more_than_1: //blocks left > 1 ldr q22, [x6, #32] //load h1l | h1h st1 { v9.16b}, [x2], #16 //AES final-1 block - store result rev64 v8.16b, v9.16b //GHASH final-1 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-1 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q9, [x0], #16 //AES final block - load plaintext ldr q21, [x6, #16] //load h2k | h1k ins v27.d[1], v27.d[0] //GHASH final-1 block - mid .inst 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high .L192_enc_blocks_less_than_1: //blocks left <= 1 mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x13, x8, x7, lt csel x14, x7, xzr, lt mov v0.d[1], x14 ldr q20, [x6] //load h1l | h1h ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mov v0.d[0], x13 //ctr0b is mask for last block and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH final block bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing st1 { v9.16b}, [x2] //store all 16B eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v17.16b, v17.16b, v28.16b //GHASH final block - high pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant eor v19.16b, v19.16b, v26.16b //GHASH final block - low ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 //return sizes ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret .L192_enc_ret: mov w0, #0x0 ret .size aesv8_gcm_8x_enc_192,.-aesv8_gcm_8x_enc_192 .globl aesv8_gcm_8x_dec_192 .hidden aesv8_gcm_8x_dec_192 .type aesv8_gcm_8x_dec_192,%function .align 4 aesv8_gcm_8x_dec_192: AARCH64_VALID_CALL_TARGET cbz x1, .L192_dec_ret stp d8, d9, [sp, #-80]! 
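// aesv8_gcm_8x_dec_192 prologue: the remaining callee-saved d-registers are spilled
// next, the GHASH reduction constant 0xc200000000000000 is staged on the stack and
// addressed through x10, and v30/v31 are set up as the 32-bit reversed counter and
// its per-block increment before the first eight counter blocks are encrypted.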
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 ld1 { v19.16b}, [x3] mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese 
v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 sub x5, x5, #1 //byte_len - 1 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 add x4, x0, x1, lsr #3 //end_input_ptr aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 add x5, x5, x0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v0.16b, v27.16b aesmc 
v0.16b, v0.16b //AES block 0 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 ldr q26, [x11, #192] //load rk12 aese v0.16b, v28.16b //AES block 0 - round 11 aese v1.16b, v28.16b //AES block 1 - round 11 aese v4.16b, v28.16b //AES block 4 - round 11 aese v6.16b, v28.16b //AES block 6 - round 11 aese v5.16b, v28.16b //AES block 5 - round 11 aese v7.16b, v28.16b //AES block 7 - round 11 aese v2.16b, v28.16b //AES block 2 - round 11 aese v3.16b, v28.16b //AES block 3 - round 11 b.ge .L192_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load ciphertext .inst 0xce016921 //eor3 v1.16b, v9.16b, v1.16b, v26.16b //AES block 1 - result .inst 0xce006900 //eor3 v0.16b, v8.16b, v0.16b, v26.16b //AES block 0 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .inst 0xce036963 //eor3 v3.16b, v11.16b, v3.16b, v26.16b //AES block 3 - result .inst 0xce026942 //eor3 v2.16b, v10.16b, v2.16b, v26.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .inst 0xce046984 //eor3 v4.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .inst 0xce0569a5 //eor3 v5.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result stp q4, q5, [x2], #32 //AES block 4, 5 - store result cmp x0, x5 //check if we have <= 8 blocks .inst 0xce0669c6 //eor3 v6.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result .inst 0xce0769e7 //eor3 v7.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 stp q6, q7, [x2], #32 //AES block 6, 7 - store result b.ge .L192_dec_prepretail //do prepretail .L192_dec_main_loop: //main loop start rev64 v9.16b, v9.16b //GHASH block 8k+1 ldp q26, q27, [x11, #0] //load rk0, rk1 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v11.16b, v11.16b //GHASH block 8k+3 eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v13.16b, v13.16b //GHASH block 8k+5 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high ldp 
q28, q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+15 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v5.16b, 
v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v21.1q, 
v14.1d, v21.1d //GHASH block 8k+7 - mid ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext rev32 v22.16b, v30.16b //CTR block 8k+17 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 ldr q26, [x11, #192] //load rk12 ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v0.16b, v28.16b //AES block 8k+8 - round 11 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v3.16b, v28.16b //AES block 8k+11 - round 11 .inst 0xce006900 //eor3 v0.16b, v8.16b, v0.16b, v26.16b //AES block 8k+8 - result rev32 v25.16b, v30.16b //CTR block 8k+19 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v4.16b, v28.16b //AES block 8k+12 - round 11 aese v2.16b, v28.16b //AES block 8k+10 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v7.16b, v28.16b //AES 
block 8k+15 - round 11 aese v5.16b, v28.16b //AES block 8k+13 - round 11 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low .inst 0xce016921 //eor3 v1.16b, v9.16b, v1.16b, v26.16b //AES block 8k+9 - result stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result .inst 0xce036963 //eor3 v3.16b, v11.16b, v3.16b, v26.16b //AES block 8k+11 - result .inst 0xce026942 //eor3 v2.16b, v10.16b, v2.16b, v26.16b //AES block 8k+10 - result .inst 0xce0769e7 //eor3 v7.16b, v15.16b, v7.16b, v26.16b //AES block 8k+15 - result stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result .inst 0xce0569a5 //eor3 v5.16b, v13.16b, v5.16b, v26.16b //AES block 8k+13 - result .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low mov v3.16b, v25.16b //CTR block 8k+19 .inst 0xce046984 //eor3 v4.16b, v12.16b, v4.16b, v26.16b //AES block 8k+12 - result stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result cmp x0, x5 //.LOOP CONTROL .inst 0xce0669c6 //eor3 v6.16b, v14.16b, v6.16b, v26.16b //AES block 8k+14 - result stp q6, q7, [x2], #32 //AES block 8k+14, 8k+15 - store result mov v0.16b, v20.16b //CTR block 8k+16 mov v1.16b, v22.16b //CTR block 8k+17 mov v2.16b, v23.16b //CTR block 8k+18 rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 b.lt .L192_dec_main_loop .L192_dec_prepretail: //PREPRETAIL ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev32 v7.16b, v30.16b //CTR block 8k+15 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES 
block 8k+10 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldp q27, q28, [x11, #64] //load rk4, rk5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 rev64 v15.16b, v15.16b //GHASH block 8k+7 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid rev64 v12.16b, v12.16b //GHASH block 8k+4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v6.16b, v28.16b aesmc 
v6.16b, v6.16b //AES block 8k+14 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 ldr d16, [x10] //MODULO - load modulo constant .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 pmull v29.1q, v17.1d, v16.1d 
//MODULO - top 64b align with mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ldr q26, [x11, #192] //load rk12 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v0.16b, v28.16b //AES block 8k+8 - round 11 .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low aese v5.16b, v28.16b //AES block 8k+13 - round 11 aese v2.16b, v28.16b //AES block 8k+10 - round 11 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v4.16b, v28.16b //AES block 8k+12 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v3.16b, v28.16b //AES block 8k+11 - round 11 aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v7.16b, v28.16b //AES block 8k+15 - round 11 .L192_dec_tail: //TAIL sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldp q20, q21, [x6, #96] //load h5l | h5h ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q24, q25, [x6, #160] //load h8k | h7k mov v29.16b, v26.16b ldp q22, q23, [x6, #128] //load h6l | h6h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag .inst 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result cmp x5, #112 b.gt .L192_dec_blocks_more_than_7 mov v7.16b, v6.16b movi v17.8b, #0 sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b cmp x5, #96 movi v19.8b, #0 mov v3.16b, v2.16b mov v2.16b, v1.16b movi v18.8b, #0 b.gt .L192_dec_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b sub v30.4s, v30.4s, v31.4s cmp x5, #80 b.gt .L192_dec_blocks_more_than_5 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b cmp x5, #64 sub v30.4s, v30.4s, v31.4s b.gt .L192_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt .L192_dec_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 mov v6.16b, v1.16b ldr q24, [x6, #64] //load h4k | h3k b.gt .L192_dec_blocks_more_than_2 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b cmp x5, #16 
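//
// A rough C sketch of the tail dispatch around this compare ladder (names are
// illustrative, not part of the generated module): the tail handles between
// 1 and 8 blocks, because the main loop consumed a multiple of 128 bytes and
// left at least one byte behind. Each not-taken step of the ladder discards
// one pre-computed keystream block by shuffling the keystream registers and
// undoing one counter increment (sub v30.4s, v30.4s, v31.4s).
//
//   /* Given 0 < bytes_left <= 128, the number of 16-byte tail blocks. */
//   static inline int gcm_tail_blocks(int bytes_left) {
//       return (bytes_left + 15) / 16;   /* 1..8 */
//   }
//   /* 8 - gcm_tail_blocks(bytes_left) keystream blocks go unused; each one
//      is skipped by the mov/sub shuffles between the compares. */
//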
b.gt .L192_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b .L192_dec_blocks_less_than_1 .L192_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v27.d[0], v8.d[1] //GHASH final-7 block - mid ldr q9, [x0], #16 //AES final-6 block - load ciphertext pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid st1 { v12.16b}, [x2], #16 //AES final-7 block - store result .inst 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in .L192_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-5 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-6 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high st1 { v12.16b}, [x2], #16 //AES final-6 block - store result .inst 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low .L192_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high ldr q9, [x0], #16 //AES final-4 block - load ciphertext eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in st1 { v12.16b}, [x2], #16 //AES final-5 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid .inst 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result .L192_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-3 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid st1 { v12.16b}, [x2], #16 //AES final-4 block - store result pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high .inst 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high .L192_dec_blocks_more_than_3: //blocks left > 3 ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b 
//GHASH final-3 block ldr q9, [x0], #16 //AES final-2 block - load ciphertext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low st1 { v12.16b}, [x2], #16 //AES final-3 block - store result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid .inst 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low ldr q24, [x6, #64] //load h4k | h3k ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid .L192_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid ldr q9, [x0], #16 //AES final-1 block - load ciphertext pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low st1 { v12.16b}, [x2], #16 //AES final-2 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid .inst 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result .L192_dec_blocks_more_than_1: //blocks left > 1 rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load ciphertext ldr q22, [x6, #32] //load h1l | h1h eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q21, [x6, #16] //load h2k | h1k pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low ins v27.d[0], v8.d[1] //GHASH final-1 block - mid st1 { v12.16b}, [x2], #16 //AES final-1 block - store result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high .inst 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high .L192_dec_blocks_less_than_1: //blocks left <= 1 rev32 v30.16b, v30.16b and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 str q30, [x16] //store the updated counter neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 mvn x8, xzr //temp1_x = 0xffffffffffffffff lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 csel x13, x8, x7, lt csel x14, x7, xzr, lt ldr q20, [x6] //load h1l | h1h mov v0.d[1], x14 ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mov v0.d[0], x13 //ctr0b is mask for last block and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of 
result before storing
	rev64	v8.16b, v9.16b                    //GHASH final block
	st1	{ v12.16b}, [x2]                  //store all 16B
	eor	v8.16b, v8.16b, v16.16b           //feed in partial tag
	ins	v16.d[0], v8.d[1]                 //GHASH final block - mid
	pmull	v26.1q, v8.1d, v20.1d             //GHASH final block - low
	eor	v16.8b, v16.8b, v8.8b             //GHASH final block - mid
	pmull2	v28.1q, v8.2d, v20.2d             //GHASH final block - high
	eor	v19.16b, v19.16b, v26.16b         //GHASH final block - low
	pmull	v16.1q, v16.1d, v21.1d            //GHASH final block - mid
	eor	v17.16b, v17.16b, v28.16b         //GHASH final block - high
	eor	v14.16b, v17.16b, v19.16b         //MODULO - karatsuba tidy up
	eor	v18.16b, v18.16b, v16.16b         //GHASH final block - mid
	ldr	d16, [x10]                        //MODULO - load modulo constant
	pmull	v21.1q, v17.1d, v16.1d            //MODULO - top 64b align with mid
	ext	v17.16b, v17.16b, v17.16b, #8     //MODULO - other top alignment
	eor	v18.16b, v18.16b, v14.16b         //MODULO - karatsuba tidy up
	.inst	0xce115652                        //eor3 v18.16b, v18.16b, v17.16b, v21.16b //MODULO - fold into mid
	pmull	v17.1q, v18.1d, v16.1d            //MODULO - mid 64b align with low
	ext	v18.16b, v18.16b, v18.16b, #8     //MODULO - other mid alignment
	.inst	0xce124673                        //eor3 v19.16b, v19.16b, v18.16b, v17.16b //MODULO - fold into low
	ext	v19.16b, v19.16b, v19.16b, #8
	rev64	v19.16b, v19.16b
	st1	{ v19.16b }, [x3]
	mov	x0, x9
	ldp	d10, d11, [sp, #16]
	ldp	d12, d13, [sp, #32]
	ldp	d14, d15, [sp, #48]
	ldp	d8, d9, [sp], #80
	ret
.L192_dec_ret:
	mov	w0, #0x0
	ret
.size aesv8_gcm_8x_dec_192,.-aesv8_gcm_8x_dec_192
.globl	aesv8_gcm_8x_enc_256
.hidden	aesv8_gcm_8x_enc_256
.type	aesv8_gcm_8x_enc_256,%function
.align	4
aesv8_gcm_8x_enc_256:
	AARCH64_VALID_CALL_TARGET
	cbz	x1, .L256_enc_ret
	stp	d8, d9, [sp, #-80]!
	lsr	x9, x1, #3
	mov	x16, x4
	mov	x11, x5
	stp	d10, d11, [sp, #16]
	stp	d12, d13, [sp, #32]
	stp	d14, d15, [sp, #48]
	mov	x5, #0xc200000000000000
	stp	x5, xzr, [sp, #64]
	add	x10, sp, #64
	ld1	{ v0.16b}, [x16]                  //CTR block 0
	mov	x5, x9
	mov	x15, #0x100000000                 //set up counter increment
	movi	v31.16b, #0x0
	mov	v31.d[1], x15
	sub	x5, x5, #1                        //byte_len - 1
	and	x5, x5, #0xffffffffffffff80       //number of bytes to be processed in main loop (at least 1 byte must be handled by tail)
	add	x5, x5, x0
	rev32	v30.16b, v0.16b                   //set up reversed counter
	add	v30.4s, v30.4s, v31.4s            //CTR block 0
	rev32	v1.16b, v30.16b                   //CTR block 1
	add	v30.4s, v30.4s, v31.4s            //CTR block 1
	rev32	v2.16b, v30.16b                   //CTR block 2
	add	v30.4s, v30.4s, v31.4s            //CTR block 2
	rev32	v3.16b, v30.16b                   //CTR block 3
	add	v30.4s, v30.4s, v31.4s            //CTR block 3
	rev32	v4.16b, v30.16b                   //CTR block 4
	add	v30.4s, v30.4s, v31.4s            //CTR block 4
	rev32	v5.16b, v30.16b                   //CTR block 5
	add	v30.4s, v30.4s, v31.4s            //CTR block 5
	ldp	q26, q27, [x11, #0]               //load rk0, rk1
	rev32	v6.16b, v30.16b                   //CTR block 6
	add	v30.4s, v30.4s, v31.4s            //CTR block 6
	rev32	v7.16b, v30.16b                   //CTR block 7
	aese	v3.16b, v26.16b
	aesmc	v3.16b, v3.16b                    //AES block 3 - round 0
	aese	v4.16b, v26.16b
	aesmc	v4.16b, v4.16b                    //AES block 4 - round 0
	aese	v2.16b, v26.16b
	aesmc	v2.16b, v2.16b                    //AES block 2 - round 0
	aese	v0.16b, v26.16b
	aesmc	v0.16b, v0.16b                    //AES block 0 - round 0
	aese	v1.16b, v26.16b
	aesmc	v1.16b, v1.16b                    //AES block 1 - round 0
	aese	v6.16b, v26.16b
	aesmc	v6.16b, v6.16b                    //AES block 6 - round 0
	aese	v5.16b, v26.16b
	aesmc	v5.16b, v5.16b                    //AES block 5 - round 0
	aese	v7.16b, v26.16b
	aesmc	v7.16b, v7.16b                    //AES block 7 - round 0
	ldp	q28, q26, [x11, #32]              //load rk2, rk3
	aese	v4.16b, v27.16b
	aesmc	v4.16b, v4.16b                    //AES block 4 - round 1
	aese	v1.16b, v27.16b
	aesmc	v1.16b, v1.16b                    //AES block 1 - round 1
	aese	v3.16b, v27.16b
	aesmc	v3.16b, v3.16b                    //AES block 3 - round 1
	aese	v6.16b, v27.16b
	aesmc	v6.16b, v6.16b
//AES block 6 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, 
v4.16b //AES block 4 - round 7 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 11 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 11 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 7 ldr q28, [x11, #224] //load rk14 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 12 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 12 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 12 aese v2.16b, v27.16b //AES block 2 - round 13 aese v1.16b, v27.16b //AES block 1 - round 13 aese v4.16b, v27.16b //AES block 4 - round 13 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 12 aese v0.16b, v27.16b //AES block 0 - round 13 aese v5.16b, v27.16b //AES block 5 - round 13 aese v6.16b, v27.16b //AES block 6 - round 13 aese v7.16b, v27.16b //AES block 7 - round 13 aese v3.16b, v27.16b //AES block 3 - round 13 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge .L256_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext .inst 0xce007108 
//eor3 v8.16b, v8.16b, v0.16b, v28.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .inst 0xce017129 //eor3 v9.16b, v9.16b, v1.16b, v28.16b //AES block 1 - result .inst 0xce03716b //eor3 v11.16b, v11.16b, v3.16b, v28.16b //AES block 3 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext .inst 0xce02714a //eor3 v10.16b, v10.16b, v2.16b, v28.16b //AES block 2 - result cmp x0, x5 //check if we have <= 8 blocks rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 stp q8, q9, [x2], #32 //AES block 0, 1 - store result stp q10, q11, [x2], #32 //AES block 2, 3 - store result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .inst 0xce04718c //eor3 v12.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result .inst 0xce0771ef //eor3 v15.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result .inst 0xce0671ce //eor3 v14.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result .inst 0xce0571ad //eor3 v13.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result stp q12, q13, [x2], #32 //AES block 4, 5 - store result rev32 v4.16b, v30.16b //CTR block 12 stp q14, q15, [x2], #32 //AES block 6, 7 - store result add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge .L256_enc_prepretail //do prepretail .L256_enc_main_loop: //main loop start ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev64 v11.16b, v11.16b //GHASH block 8k+3 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k rev64 v12.16b, v12.16b //GHASH block 8k+4 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 eor v8.16b, v8.16b, v19.16b //PRE 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese 
v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 rev64 v14.16b, v14.16b //GHASH block 8k+6 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese 
v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high 
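//
// Each "GHASH block N" in this loop follows the same Karatsuba split against
// a pre-computed power of H loaded from the table at x6: one carry-less
// multiply of the high 64-bit halves, one of the low halves, and one of the
// XORed halves, with the results of all eight blocks folded together (eor3)
// before a single reduction. A sketch of the identity in play, assuming
// clmul64() stands in for pmull/pmull2 and that the preloaded "hNk" values
// hold h.hi ^ h.lo (illustrative names only):
//
//   hi  = clmul64(a.hi, h.hi);
//   lo  = clmul64(a.lo, h.lo);
//   mid = clmul64(a.hi ^ a.lo, h.hi ^ h.lo);
//   true_mid = mid ^ hi ^ lo;   /* recovered later in the "karatsuba tidy up" */
//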
.inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low ldr d16, [x10] //MODULO - load modulo constant pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high ldp q26, q27, [x11, #192] //load rk12, rk13 rev32 v20.16b, v30.16b //CTR block 8k+16 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 rev32 v22.16b, v30.16b //CTR block 8k+17 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 ldr q28, [x11, #224] //load rk14 aese v7.16b, v27.16b //AES block 8k+15 - round 13 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext aese v2.16b, v27.16b //AES block 8k+10 - round 13 aese v4.16b, v27.16b //AES block 8k+12 - round 13 rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v5.16b, v27.16b //AES block 8k+13 - round 13 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 
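//
// The MODULO steps in this loop reduce the accumulated 256-bit product
// (high/mid/low halves in v17/v18/v19) modulo the GHASH polynomial
// x^128 + x^7 + x^2 + x + 1, using the reflected constant 0xC200000000000000
// that the prologue stored on the stack and that is reloaded through x10.
// Roughly, with illustrative names:
//
//   mid ^= hi ^ lo;                                        /* karatsuba tidy up */
//   mid ^= clmul64(hi.lo64, 0xC2...) ^ swap_halves(hi);    /* fold into mid     */
//   lo  ^= clmul64(mid.lo64, 0xC2...) ^ swap_halves(mid);  /* fold into low     */
//   /* lo becomes the 128-bit GHASH accumulator for the next 8-block group */
//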
aese v3.16b, v27.16b //AES block 8k+11 - round 13 cmp x0, x5 //.LOOP CONTROL .inst 0xce02714a //eor3 v10.16b, v10.16b, v2.16b, v28.16b //AES block 8k+10 - result rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v0.16b, v27.16b //AES block 8k+8 - round 13 aese v6.16b, v27.16b //AES block 8k+14 - round 13 .inst 0xce0571ad //eor3 v13.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v1.16b, v27.16b //AES block 8k+9 - round 13 .inst 0xce04718c //eor3 v12.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result rev32 v4.16b, v30.16b //CTR block 8k+20 .inst 0xce03716b //eor3 v11.16b, v11.16b, v3.16b, v28.16b //AES block 8k+11 - result mov v3.16b, v25.16b //CTR block 8k+19 .inst 0xce017129 //eor3 v9.16b, v9.16b, v1.16b, v28.16b //AES block 8k+9 - result .inst 0xce007108 //eor3 v8.16b, v8.16b, v0.16b, v28.16b //AES block 8k+8 - result add v30.4s, v30.4s, v31.4s //CTR block 8k+20 stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v2.16b, v23.16b //CTR block 8k+18 .inst 0xce0771ef //eor3 v15.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result .inst 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result .inst 0xce0671ce //eor3 v14.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result mov v1.16b, v22.16b //CTR block 8k+17 stp q12, q13, [x2], #32 //AES block 4, 5 - store result stp q14, q15, [x2], #32 //AES block 6, 7 - store result mov v0.16b, v20.16b //CTR block 8k+16 b.lt .L256_enc_main_loop .L256_enc_prepretail: //PREPRETAIL rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v13.16b, v13.16b //GHASH block 8k+5 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v7.16b, v30.16b //CTR block 8k+15 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v11.16b, v11.16b //GHASH block 8k+3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 
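//
// This PREPRETAIL pass repeats the GHASH/AES interleaving once more without
// any loads or stores: it folds the last eight ciphertext blocks produced by
// the main loop into the hash while finishing the AES rounds for the eight
// counter blocks that the tail code will consume.
//
// Counter handling throughout the function: the counter block is kept 32-bit
// byte-reversed in v30 so that adding v31 (a 1 in the last 32-bit lane,
// loaded as 0x100000000 into its upper 64 bits) bumps the big-endian counter
// word, and rev32 turns it back into the block fed to the AES rounds. In C
// terms, deriving the i-th counter block from the initial one looks roughly
// like this (illustrative helper, not part of this module):
//
//   static inline void gcm_ctr_block(unsigned char out[16],
//                                    const unsigned char iv[16], unsigned int i) {
//       unsigned int c = ((unsigned int)iv[12] << 24) | ((unsigned int)iv[13] << 16) |
//                        ((unsigned int)iv[14] << 8) | (unsigned int)iv[15];
//       c += i;                                  /* 32-bit wrap, as in CTR32 */
//       for (int k = 0; k < 12; k++) out[k] = iv[k];
//       out[12] = (unsigned char)(c >> 24); out[13] = (unsigned char)(c >> 16);
//       out[14] = (unsigned char)(c >> 8);  out[15] = (unsigned char)c;
//   }
//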
aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high rev64 v14.16b, v14.16b //GHASH block 8k+6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid rev64 v12.16b, v12.16b //GHASH block 8k+4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid .inst 0xce165273 //eor3 v19.16b, 
v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h ldp q28, q26, [x11, #128] //load rk8, rk9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, 
v10.16b //GHASH block 8k+4, 8k+5 - high ldp q27, q28, [x11, #160] //load rk10, rk11 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid ldr d16, [x10] //MODULO - load modulo constant .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v0.16b, v27.16b //AES block 8k+8 - round 13 .inst 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low aese v5.16b, v27.16b //AES block 8k+13 - round 13 aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v3.16b, v27.16b //AES block 8k+11 - round 13 aese v4.16b, v27.16b //AES block 8k+12 - round 13 aese v7.16b, 
v27.16b //AES block 8k+15 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 aese v6.16b, v27.16b //AES block 8k+14 - round 13 .L256_enc_tail: //TAIL ldp q24, q25, [x6, #160] //load h8l | h8h sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldr q8, [x0], #16 //AES block 8k+8 - load plaintext ldp q20, q21, [x6, #96] //load h5l | h5h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h mov v29.16b, v28.16b cmp x5, #112 .inst 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result b.gt .L256_enc_blocks_more_than_7 movi v19.8b, #0 mov v7.16b, v6.16b movi v17.8b, #0 mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b sub v30.4s, v30.4s, v31.4s mov v2.16b, v1.16b movi v18.8b, #0 cmp x5, #96 b.gt .L256_enc_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b cmp x5, #80 mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L256_enc_blocks_more_than_5 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b cmp x5, #64 mov v4.16b, v1.16b b.gt .L256_enc_blocks_more_than_4 cmp x5, #48 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L256_enc_blocks_more_than_3 cmp x5, #32 mov v7.16b, v6.16b ldr q24, [x6, #64] //load h4k | h3k mov v6.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt .L256_enc_blocks_more_than_2 mov v7.16b, v1.16b sub v30.4s, v30.4s, v31.4s cmp x5, #16 b.gt .L256_enc_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b .L256_enc_blocks_less_than_1 .L256_enc_blocks_more_than_7: //blocks left > 7 st1 { v9.16b}, [x2], #16 //AES final-7 block - store result rev64 v8.16b, v9.16b //GHASH final-7 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-6 block - load plaintext pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v27.d[0], v8.d[1] //GHASH final-7 block - mid ins v18.d[0], v24.d[1] //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid .inst 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low .L256_enc_blocks_more_than_6: //blocks left > 6 st1 { v9.16b}, [x2], #16 //AES final-6 block - store result rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low ins v27.d[0], v8.d[1] //GHASH final-6 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high ldr q9, [x0], #16 //AES final-5 block - load plaintext eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid .inst 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high .L256_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v17.16b, v17.16b, v28.16b //GHASH 
final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid .inst 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result .L256_enc_blocks_more_than_4: //blocks left > 4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block ldr q9, [x0], #16 //AES final-3 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high .inst 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high .L256_enc_blocks_more_than_3: //blocks left > 3 st1 { v9.16b}, [x2], #16 //AES final-3 block - store result ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ldr q24, [x6, #64] //load h4k | h3k ins v27.d[1], v27.d[0] //GHASH final-3 block - mid ldr q9, [x0], #16 //AES final-2 block - load plaintext pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low .inst 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low .L256_enc_blocks_more_than_2: //blocks left > 2 ldr q23, [x6, #48] //load h3l | h3h st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block ldr q9, [x0], #16 //AES final-1 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high .inst 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low .L256_enc_blocks_more_than_1: //blocks left > 1 st1 { v9.16b}, [x2], #16 //AES final-1 block - store result ldr q22, [x6, #32] //load h2l | h2h rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 
//supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high .inst 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q21, [x6, #16] //load h2k | h1k eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid .L256_enc_blocks_less_than_1: //blocks left <= 1 and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x14, x7, xzr, lt csel x13, x8, x7, lt mov v0.d[0], x13 //ctr0b is mask for last block ldr q20, [x6] //load h1l | h1h ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mov v0.d[1], x14 and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH final block rev32 v30.16b, v30.16b bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing str q30, [x16] //store the updated counter eor v8.16b, v8.16b, v16.16b //feed in partial tag st1 { v9.16b}, [x2] //store all 16B ins v16.d[0], v8.d[1] //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v19.16b, v19.16b, v26.16b //GHASH final block - low eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .inst 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 //return sizes ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret .L256_enc_ret: mov w0, #0x0 ret .size aesv8_gcm_8x_enc_256,.-aesv8_gcm_8x_enc_256 .globl aesv8_gcm_8x_dec_256 .hidden aesv8_gcm_8x_dec_256 .type aesv8_gcm_8x_dec_256,%function .align 4 aesv8_gcm_8x_dec_256: AARCH64_VALID_CALL_TARGET cbz x1, .L256_dec_ret stp d8, d9, [sp, #-80]! 
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 ld1 { v0.16b}, [x16] //CTR block 0 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 mov x5, x9 sub x5, x5, #1 //byte_len - 1 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - 
round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 add x4, x0, x1, lsr #3 //end_input_ptr add x5, x5, x0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - 
round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 11 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 11 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 11 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 12 cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 12 aese v5.16b, v27.16b //AES block 5 - round 13 aese v1.16b, v27.16b //AES block 1 - round 13 aese v2.16b, v27.16b //AES block 2 - round 13 aese v0.16b, v27.16b //AES block 0 - round 13 aese v4.16b, v27.16b //AES block 4 - round 13 aese v6.16b, v27.16b //AES block 6 - round 13 aese v3.16b, v27.16b //AES block 3 - round 13 aese v7.16b, v27.16b //AES block 7 - round 13 b.ge .L256_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load ciphertext ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext cmp x0, x5 //check if we have <= 8 blocks .inst 0xce017121 //eor3 v1.16b, v9.16b, v1.16b, v28.16b //AES block 1 - result .inst 0xce007100 //eor3 v0.16b, v8.16b, v0.16b, v28.16b //AES block 0 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .inst 0xce037163 //eor3 v3.16b, v11.16b, v3.16b, v28.16b //AES block 3 - result .inst 0xce0571a5 //eor3 v5.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result .inst 0xce047184 //eor3 v4.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .inst 0xce027142 //eor3 v2.16b, v10.16b, v2.16b, v28.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .inst 0xce0671c6 //eor3 v6.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 stp q4, q5, [x2], #32 //AES block 4, 5 - store result .inst 0xce0771e7 //eor3 v7.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result stp q6, q7, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge .L256_dec_prepretail //do prepretail .L256_dec_main_loop: //main loop start rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldr q23, 
[x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 eor v8.16b, v8.16b, v19.16b //PRE 1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - 
round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low ldp q26, q27, [x11, #96] //load rk6, rk7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h rev64 v14.16b, v14.16b //GHASH block 8k+6 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, 
v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+15 .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low rev32 v20.16b, v30.16b //CTR block 8k+16 ldr d16, [x10] //MODULO - load modulo constant add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - 
round 11 .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid rev32 v22.16b, v30.16b //CTR block 8k+17 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 ldr q28, [x11, #224] //load rk14 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v0.16b, v27.16b //AES block 8k+8 - round 13 aese v5.16b, v27.16b //AES block 8k+13 - round 13 rev32 v25.16b, v30.16b //CTR block 8k+19 .inst 0xce027142 //eor3 v2.16b, v10.16b, v2.16b, v28.16b //AES block 8k+10 - result .inst 0xce017121 //eor3 v1.16b, v9.16b, v1.16b, v28.16b //AES block 8k+9 - result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v7.16b, v27.16b //AES block 8k+15 - round 13 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v27.16b //AES block 8k+12 - round 13 .inst 0xce0571a5 //eor3 v5.16b, v13.16b, v5.16b, v28.16b //AES block 8k+13 - result .inst 0xce007100 //eor3 v0.16b, v8.16b, v0.16b, v28.16b //AES block 8k+8 - result aese v3.16b, v27.16b //AES block 8k+11 - round 13 stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v0.16b, v20.16b //CTR block 8k+16 .inst 0xce047184 //eor3 v4.16b, v12.16b, v4.16b, v28.16b //AES block 8k+12 - result .inst 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low .inst 0xce037163 //eor3 v3.16b, v11.16b, v3.16b, v28.16b //AES block 8k+11 - result stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result mov v3.16b, v25.16b //CTR block 8k+19 mov v2.16b, v23.16b //CTR block 8k+18 aese v6.16b, v27.16b //AES block 8k+14 - round 13 mov v1.16b, v22.16b //CTR block 8k+17 stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result .inst 0xce0771e7 //eor3 v7.16b, v15.16b, v7.16b, v28.16b //AES block 8k+15 - result .inst 0xce0671c6 //eor3 v6.16b, v14.16b, v6.16b, v28.16b //AES block 8k+14 - result rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 cmp x0, x5 //.LOOP CONTROL stp q6, q7, 
[x2], #32 //AES block 8k+14, 8k+15 - store result b.lt .L256_dec_main_loop .L256_dec_prepretail: //PREPRETAIL ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v12.16b, v12.16b //GHASH block 8k+4 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v6.16b, v30.16b //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v10.16b, v10.16b //GHASH block 8k+2 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 eor v8.16b, v8.16b, v19.16b //PRE 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low rev64 v11.16b, v11.16b //GHASH block 8k+3 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 rev64 v14.16b, v14.16b //GHASH block 8k+6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 
8k+1 - high eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 .inst 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h rev64 v15.16b, v15.16b //GHASH block 8k+7 rev64 v13.16b, v13.16b //GHASH block 8k+5 .inst 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 
aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .inst 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low ldp q27, q28, [x11, #160] //load rk10, rk11 .inst 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low .inst 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 .inst 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .inst 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low ldr d16, [x10] //MODULO - load modulo constant .inst 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 .inst 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 
- round 10 ldp q26, q27, [x11, #192] //load rk12, rk13 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 .inst 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v3.16b, v27.16b //AES block 8k+11 - round 13 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v4.16b, v27.16b //AES block 8k+12 - round 13 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 aese v6.16b, v27.16b //AES block 8k+14 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v5.16b, v27.16b //AES block 8k+13 - round 13 .inst 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v7.16b, v27.16b //AES block 8k+15 - round 13 aese v0.16b, v27.16b //AES block 8k+8 - round 13 .L256_dec_tail: //TAIL ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process cmp x5, #112 ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q24, q25, [x6, #160] //load h8k | h7k mov v29.16b, v28.16b ldp q20, q21, [x6, #96] //load h5l | h5h .inst 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result ldp q22, q23, [x6, #128] //load h6l | h6h b.gt .L256_dec_blocks_more_than_7 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b movi v19.8b, #0 movi v17.8b, #0 movi v18.8b, #0 mov v3.16b, v2.16b cmp x5, #96 mov v2.16b, v1.16b b.gt .L256_dec_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt .L256_dec_blocks_more_than_5 cmp x5, #64 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b b.gt .L256_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #48 mov v6.16b, v5.16b mov v5.16b, v1.16b b.gt .L256_dec_blocks_more_than_3 ldr q24, [x6, #64] //load h4k | h3k sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 mov v6.16b, v1.16b b.gt .L256_dec_blocks_more_than_2 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b cmp x5, #16 b.gt .L256_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | 
h1k b .L256_dec_blocks_less_than_1 .L256_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block ldr q9, [x0], #16 //AES final-6 block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-7 block - store result ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-7 block - mid .inst 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid .L256_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-5 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-6 block - mid st1 { v12.16b}, [x2], #16 //AES final-6 block - store result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low .inst 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high .L256_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load ciphertext eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid st1 { v12.16b}, [x2], #16 //AES final-5 block - store result pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high .inst 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid movi v16.8b, #0 //supress further partial tag feed in .L256_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid ldr q9, [x0], #16 //AES final-3 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low st1 { v12.16b}, [x2], #16 //AES final-4 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid .inst 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result .L256_dec_blocks_more_than_3: //blocks left > 3 ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 
//AES final-2 block - load ciphertext ldr q24, [x6, #64] //load h4k | h3k ins v27.d[0], v8.d[1] //GHASH final-3 block - mid st1 { v12.16b}, [x2], #16 //AES final-3 block - store result .inst 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid .L256_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h ldr q9, [x0], #16 //AES final-1 block - load ciphertext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low st1 { v12.16b}, [x2], #16 //AES final-2 block - store result .inst 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high .L256_dec_blocks_more_than_1: //blocks left > 1 rev64 v8.16b, v9.16b //GHASH final-1 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-1 block - mid ldr q22, [x6, #32] //load h2l | h2h eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q9, [x0], #16 //AES final block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-1 block - store result ldr q21, [x6, #16] //load h2k | h1k pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low ins v27.d[1], v27.d[0] //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low .inst 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid .L256_dec_blocks_less_than_1: //blocks left <= 1 ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x14, x7, xzr, lt csel x13, x8, x7, lt mov v0.d[0], x13 //ctr0b is mask for last block mov v0.d[1], x14 and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits ldr q20, [x6] //load h1l | h1h bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing rev64 v8.16b, v9.16b //GHASH final block eor v8.16b, v8.16b, 
v16.16b //feed in partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant eor v19.16b, v19.16b, v26.16b //GHASH final block - low pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid eor v14.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v17.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment st1 { v12.16b}, [x2] //store all 16B eor v18.16b, v18.16b, v14.16b //MODULO - karatsuba tidy up eor v21.16b, v17.16b, v21.16b //MODULO - fold into mid eor v18.16b, v18.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment eor v19.16b, v19.16b, v17.16b //MODULO - fold into low eor v19.16b, v19.16b, v18.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret .L256_dec_ret: mov w0, #0x0 ret .size aesv8_gcm_8x_dec_256,.-aesv8_gcm_8x_dec_256 .byte 65,69,83,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,65,82,77,118,56,44,32,83,80,68,88,32,66,83,68,45,51,45,67,108,97,117,115,101,32,98,121,32,60,120,105,97,111,107,97,110,103,46,113,105,97,110,64,97,114,109,46,99,111,109,62,0 .align 2 .align 2 #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
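
The "MODULO" steps above are the carry-less reduction of the 256-bit GHASH product back into GF(2^128): the high and mid halves are folded into the low half with two pmull multiplications by the constant loaded from [x10]. For reference, a minimal bitwise C sketch of the same GHASH multiplication as specified in NIST SP 800-38D, assuming big-endian 16-byte blocks; the name ghash_mul is hypothetical and the sketch does not mirror the pmull/Karatsuba scheduling or the 8x interleaving used by the assembly.

#include <stdint.h>
#include <string.h>

/* Bitwise GF(2^128) multiply, reduced by x^128 + x^7 + x^2 + x + 1
 * (R = 0xe1 || 0^120), operating on big-endian 16-byte blocks. */
static void ghash_mul(uint8_t r[16], const uint8_t x[16], const uint8_t y[16]) {
    uint8_t z[16] = {0};                 /* accumulator Z, starts at 0     */
    uint8_t v[16];                       /* V, a running multiple of Y     */
    memcpy(v, y, 16);

    for (int i = 0; i < 128; i++) {
        if (x[i / 8] & (0x80u >> (i % 8)))        /* bit i of X, MSB first */
            for (int j = 0; j < 16; j++)
                z[j] ^= v[j];

        int carry = v[15] & 1;                    /* bit shifted out of V  */
        for (int j = 15; j > 0; j--)
            v[j] = (uint8_t)((v[j] >> 1) | (v[j - 1] << 7));
        v[0] >>= 1;
        if (carry)
            v[0] ^= 0xe1;                         /* fold in the reduction */
    }
    memcpy(r, z, 16);
}

The production routine instead forms 64x64-bit carry-less products with pmull/pmull2, combines them Karatsuba-style ("karatsuba tidy up"), and reduces with the two "MODULO - top/mid 64b align" multiplications seen above.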
marvin-hansen/iggy-streaming-system
7,590
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include "openssl/arm_arch.h" .text .globl beeu_mod_inverse_vartime .hidden beeu_mod_inverse_vartime .type beeu_mod_inverse_vartime, %function .align 4 beeu_mod_inverse_vartime: // Reserve enough space for 14 8-byte registers on the stack // in the first stp call for x29, x30. // Then store the remaining callee-saved registers. // // | x29 | x30 | x19 | x20 | ... | x27 | x28 | x0 | x2 | // ^ ^ // sp <------------------- 112 bytes ----------------> old sp // x29 (FP) // AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-112]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] stp x0,x2,[sp,#96] // B = b3..b0 := a ldp x25,x26,[x1] ldp x27,x28,[x1,#16] // n3..n0 := n // Note: the value of input params are changed in the following. ldp x0,x1,[x2] ldp x2,x30,[x2,#16] // A = a3..a0 := n mov x21, x0 mov x22, x1 mov x23, x2 mov x24, x30 // X = x4..x0 := 1 mov x3, #1 eor x4, x4, x4 eor x5, x5, x5 eor x6, x6, x6 eor x7, x7, x7 // Y = y4..y0 := 0 eor x8, x8, x8 eor x9, x9, x9 eor x10, x10, x10 eor x11, x11, x11 eor x12, x12, x12 .Lbeeu_loop: // if B == 0, jump to .Lbeeu_loop_end orr x14, x25, x26 orr x14, x14, x27 // reverse the bit order of x25. This is needed for clz after this macro rbit x15, x25 orr x14, x14, x28 cbz x14,.Lbeeu_loop_end // 0 < B < |n|, // 0 < A <= |n|, // (1) X*a == B (mod |n|), // (2) (-1)*Y*a == A (mod |n|) // Now divide B by the maximum possible power of two in the // integers, and divide X by the same value mod |n|. // When we're done, (1) still holds. // shift := number of trailing 0s in x25 // ( = number of leading 0s in x15; see the "rbit" instruction in TEST_B_ZERO) clz x13, x15 // If there is no shift, goto shift_A_Y cbz x13, .Lbeeu_shift_A_Y // Shift B right by "x13" bits neg x14, x13 lsr x25, x25, x13 lsl x15, x26, x14 lsr x26, x26, x13 lsl x19, x27, x14 orr x25, x25, x15 lsr x27, x27, x13 lsl x20, x28, x14 orr x26, x26, x19 lsr x28, x28, x13 orr x27, x27, x20 // Shift X right by "x13" bits, adding n whenever X becomes odd. // x13--; // x14 := 0; needed in the addition to the most significant word in SHIFT1 eor x14, x14, x14 .Lbeeu_shift_loop_X: tbz x3, #0, .Lshift1_0 adds x3, x3, x0 adcs x4, x4, x1 adcs x5, x5, x2 adcs x6, x6, x30 adc x7, x7, x14 .Lshift1_0: // var0 := [var1|var0]<64..1>; // i.e. concatenate var1 and var0, // extract bits <64..1> from the resulting 128-bit value // and put them in var0 extr x3, x4, x3, #1 extr x4, x5, x4, #1 extr x5, x6, x5, #1 extr x6, x7, x6, #1 lsr x7, x7, #1 subs x13, x13, #1 bne .Lbeeu_shift_loop_X // Note: the steps above perform the same sequence as in p256_beeu-x86_64-asm.pl // with the following differences: // - "x13" is set directly to the number of trailing 0s in B // (using rbit and clz instructions) // - The loop is only used to call SHIFT1(X) // and x13 is decreased while executing the X loop. // - SHIFT256(B, x13) is performed before right-shifting X; they are independent .Lbeeu_shift_A_Y: // Same for A and Y. // Afterwards, (2) still holds. 
// Reverse the bit order of x21 // x13 := number of trailing 0s in x21 (= number of leading 0s in x15) rbit x15, x21 clz x13, x15 // If there is no shift, goto |B-A|, X+Y update cbz x13, .Lbeeu_update_B_X_or_A_Y // Shift A right by "x13" bits neg x14, x13 lsr x21, x21, x13 lsl x15, x22, x14 lsr x22, x22, x13 lsl x19, x23, x14 orr x21, x21, x15 lsr x23, x23, x13 lsl x20, x24, x14 orr x22, x22, x19 lsr x24, x24, x13 orr x23, x23, x20 // Shift Y right by "x13" bits, adding n whenever Y becomes odd. // x13--; // x14 := 0; needed in the addition to the most significant word in SHIFT1 eor x14, x14, x14 .Lbeeu_shift_loop_Y: tbz x8, #0, .Lshift1_1 adds x8, x8, x0 adcs x9, x9, x1 adcs x10, x10, x2 adcs x11, x11, x30 adc x12, x12, x14 .Lshift1_1: // var0 := [var1|var0]<64..1>; // i.e. concatenate var1 and var0, // extract bits <64..1> from the resulting 128-bit value // and put them in var0 extr x8, x9, x8, #1 extr x9, x10, x9, #1 extr x10, x11, x10, #1 extr x11, x12, x11, #1 lsr x12, x12, #1 subs x13, x13, #1 bne .Lbeeu_shift_loop_Y .Lbeeu_update_B_X_or_A_Y: // Try T := B - A; if cs, continue with B > A (cs: carry set = no borrow) // Note: this is a case of unsigned arithmetic, where T fits in 4 64-bit words // without taking a sign bit if generated. The lack of a carry would // indicate a negative result. See, for example, // https://community.arm.com/developer/ip-products/processors/b/processors-ip-blog/posts/condition-codes-1-condition-flags-and-codes subs x14, x25, x21 sbcs x15, x26, x22 sbcs x19, x27, x23 sbcs x20, x28, x24 bcs .Lbeeu_B_greater_than_A // Else A > B => // A := A - B; Y := Y + X; goto beginning of the loop subs x21, x21, x25 sbcs x22, x22, x26 sbcs x23, x23, x27 sbcs x24, x24, x28 adds x8, x8, x3 adcs x9, x9, x4 adcs x10, x10, x5 adcs x11, x11, x6 adc x12, x12, x7 b .Lbeeu_loop .Lbeeu_B_greater_than_A: // Continue with B > A => // B := B - A; X := X + Y; goto beginning of the loop mov x25, x14 mov x26, x15 mov x27, x19 mov x28, x20 adds x3, x3, x8 adcs x4, x4, x9 adcs x5, x5, x10 adcs x6, x6, x11 adc x7, x7, x12 b .Lbeeu_loop .Lbeeu_loop_end: // The Euclid's algorithm loop ends when A == gcd(a,n); // this would be 1, when a and n are co-prime (i.e. do not have a common factor). // Since (-1)*Y*a == A (mod |n|), Y>0 // then out = -Y mod n // Verify that A = 1 ==> (-1)*Y*a = A = 1 (mod |n|) // Is A-1 == 0? // If not, fail. 
sub x14, x21, #1 orr x14, x14, x22 orr x14, x14, x23 orr x14, x14, x24 cbnz x14, .Lbeeu_err // If Y>n ==> Y:=Y-n .Lbeeu_reduction_loop: // x_i := y_i - n_i (X is no longer needed, use it as temp) // (x14 = 0 from above) subs x3, x8, x0 sbcs x4, x9, x1 sbcs x5, x10, x2 sbcs x6, x11, x30 sbcs x7, x12, x14 // If result is non-negative (i.e., cs = carry set = no borrow), // y_i := x_i; goto reduce again // else // y_i := y_i; continue csel x8, x3, x8, cs csel x9, x4, x9, cs csel x10, x5, x10, cs csel x11, x6, x11, cs csel x12, x7, x12, cs bcs .Lbeeu_reduction_loop // Now Y < n (Y cannot be equal to n, since the inverse cannot be 0) // out = -Y = n-Y subs x8, x0, x8 sbcs x9, x1, x9 sbcs x10, x2, x10 sbcs x11, x30, x11 // Save Y in output (out (x0) was saved on the stack) ldr x3, [sp,#96] stp x8, x9, [x3] stp x10, x11, [x3,#16] // return 1 (success) mov x0, #1 b .Lbeeu_finish .Lbeeu_err: // return 0 (error) eor x0, x0, x0 .Lbeeu_finish: // Restore callee-saved registers, except x0, x2 add sp,x29,#0 ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldp x25,x26,[sp,#64] ldp x27,x28,[sp,#80] ldp x29,x30,[sp],#112 AARCH64_VALIDATE_LINK_REGISTER ret .size beeu_mod_inverse_vartime,.-beeu_mod_inverse_vartime #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
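
beeu_mod_inverse_vartime implements the binary extended Euclidean algorithm, maintaining the invariants the comments state, X*a == B (mod n) and (-1)*Y*a == A (mod n), and returning -Y mod n once A reaches 1. Below is a minimal C sketch of the same loop on single 64-bit words; the name beeu_inverse_u64 is hypothetical, and it assumes n is odd, n < 2^63 (so sums never overflow a word), and gcd(a, n) = 1, rather than the 4-limb layout and extra carry limb used above.

#include <stdint.h>

/* Binary extended Euclid modular inverse on 64-bit words.
 * Invariants, matching the assembly comments:
 *   (1)  X*a ==  B (mod n)
 *   (2) -Y*a ==  A (mod n)
 * Variable-time: not suitable for secret inputs. */
static uint64_t beeu_inverse_u64(uint64_t a, uint64_t n) {
    uint64_t A = n, B = a % n;           /* A := n, B := a               */
    uint64_t X = 1, Y = 0;               /* X := 1, Y := 0               */

    while (B != 0) {
        while ((B & 1) == 0) {           /* strip factors of 2 off B ... */
            B >>= 1;                     /* ... and halve X mod n        */
            X = (X & 1) ? (X >> 1) + (n >> 1) + 1 : X >> 1;  /* (X+n)/2  */
        }
        while ((A & 1) == 0) {           /* same for A and Y             */
            A >>= 1;
            Y = (Y & 1) ? (Y >> 1) + (n >> 1) + 1 : Y >> 1;
        }
        if (B >= A) { B -= A; X = (X + Y) % n; }  /* B := B-A, X := X+Y  */
        else        { A -= B; Y = (Y + X) % n; }  /* A := A-B, Y := Y+X  */
    }
    /* Loop ends with A == gcd(a, n) == 1, so -Y*a == 1 (mod n). */
    return (n - Y) % n;                  /* inverse is -Y mod n          */
}

The assembly removes the whole maximal power of two from B (and A) in one shot using rbit/clz and multi-word shifts, whereas the sketch halves one bit at a time; the reduction of Y against n at the end corresponds to .Lbeeu_reduction_loop.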
marvin-hansen/iggy-streaming-system
39,167
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/p256-armv8-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include "openssl/arm_arch.h" .section .rodata .align 5 .Lpoly: .quad 0xffffffffffffffff,0x00000000ffffffff,0x0000000000000000,0xffffffff00000001 .LRR: // 2^512 mod P precomputed for NIST P256 polynomial .quad 0x0000000000000003,0xfffffffbffffffff,0xfffffffffffffffe,0x00000004fffffffd .Lone_mont: .quad 0x0000000000000001,0xffffffff00000000,0xffffffffffffffff,0x00000000fffffffe .Lone: .quad 1,0,0,0 .Lord: .quad 0xf3b9cac2fc632551,0xbce6faada7179e84,0xffffffffffffffff,0xffffffff00000000 .LordK: .quad 0xccd1c8aaee00bc4f .byte 69,67,80,95,78,73,83,84,90,50,53,54,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .text // void ecp_nistz256_mul_mont(BN_ULONG x0[4],const BN_ULONG x1[4], // const BN_ULONG x2[4]); .globl ecp_nistz256_mul_mont .hidden ecp_nistz256_mul_mont .type ecp_nistz256_mul_mont,%function .align 4 ecp_nistz256_mul_mont: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-32]! add x29,sp,#0 stp x19,x20,[sp,#16] ldr x3,[x2] // bp[0] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_mul_mont ldp x19,x20,[sp,#16] ldp x29,x30,[sp],#32 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_mul_mont,.-ecp_nistz256_mul_mont // void ecp_nistz256_sqr_mont(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl ecp_nistz256_sqr_mont .hidden ecp_nistz256_sqr_mont .type ecp_nistz256_sqr_mont,%function .align 4 ecp_nistz256_sqr_mont: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-32]! add x29,sp,#0 stp x19,x20,[sp,#16] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sqr_mont ldp x19,x20,[sp,#16] ldp x29,x30,[sp],#32 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_sqr_mont,.-ecp_nistz256_sqr_mont // void ecp_nistz256_div_by_2(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl ecp_nistz256_div_by_2 .hidden ecp_nistz256_div_by_2 .type ecp_nistz256_div_by_2,%function .align 4 ecp_nistz256_div_by_2: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_div_by_2 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_div_by_2,.-ecp_nistz256_div_by_2 // void ecp_nistz256_mul_by_2(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl ecp_nistz256_mul_by_2 .hidden ecp_nistz256_mul_by_2 .type ecp_nistz256_mul_by_2,%function .align 4 ecp_nistz256_mul_by_2: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 bl __ecp_nistz256_add_to // ret = a+a // 2*a ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_mul_by_2,.-ecp_nistz256_mul_by_2 // void ecp_nistz256_mul_by_3(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl ecp_nistz256_mul_by_3 .hidden ecp_nistz256_mul_by_3 .type ecp_nistz256_mul_by_3,%function .align 4 ecp_nistz256_mul_by_3: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 mov x4,x14 mov x5,x15 mov x6,x16 mov x7,x17 bl __ecp_nistz256_add_to // ret = a+a // 2*a mov x8,x4 mov x9,x5 mov x10,x6 mov x11,x7 bl __ecp_nistz256_add_to // ret += a // 2*a+a=3*a ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_mul_by_3,.-ecp_nistz256_mul_by_3 // void ecp_nistz256_sub(BN_ULONG x0[4],const BN_ULONG x1[4], // const BN_ULONG x2[4]); .globl ecp_nistz256_sub .hidden ecp_nistz256_sub .type ecp_nistz256_sub,%function .align 4 ecp_nistz256_sub: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sub_from ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_sub,.-ecp_nistz256_sub // void ecp_nistz256_neg(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl ecp_nistz256_neg .hidden ecp_nistz256_neg .type ecp_nistz256_neg,%function .align 4 ecp_nistz256_neg: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x2,x1 mov x14,xzr // a = 0 mov x15,xzr mov x16,xzr mov x17,xzr adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sub_from ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_neg,.-ecp_nistz256_neg // note that __ecp_nistz256_mul_mont expects a[0-3] input pre-loaded // to x4-x7 and b[0] - to x3 .type __ecp_nistz256_mul_mont,%function .align 4 __ecp_nistz256_mul_mont: mul x14,x4,x3 // a[0]*b[0] umulh x8,x4,x3 mul x15,x5,x3 // a[1]*b[0] umulh x9,x5,x3 mul x16,x6,x3 // a[2]*b[0] umulh x10,x6,x3 mul x17,x7,x3 // a[3]*b[0] umulh x11,x7,x3 ldr x3,[x2,#8] // b[1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adc x19,xzr,x11 mov x20,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr ldr x3,[x2,#8*(1+1)] // b[1+1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr ldr x3,[x2,#8*(2+1)] // b[2+1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // 
+=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr // last reduction subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 adcs x16,x17,x10 // +=acc[0]*0xffff0001 adcs x17,x19,x11 adc x19,x20,xzr adds x8,x14,#1 // subs x8,x14,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x19,xzr // did it borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret .size __ecp_nistz256_mul_mont,.-__ecp_nistz256_mul_mont // note that __ecp_nistz256_sqr_mont expects a[0-3] input pre-loaded // to x4-x7 .type __ecp_nistz256_sqr_mont,%function .align 4 __ecp_nistz256_sqr_mont: // | | | | | |a1*a0| | // | | | | |a2*a0| | | // | |a3*a2|a3*a0| | | | // | | | |a2*a1| | | | // | | |a3*a1| | | | | // *| | | | | | | | 2| // +|a3*a3|a2*a2|a1*a1|a0*a0| // |--+--+--+--+--+--+--+--| // |A7|A6|A5|A4|A3|A2|A1|A0|, where Ax is , i.e. follow // // "can't overflow" below mark carrying into high part of // multiplication result, which can't overflow, because it // can never be all ones. mul x15,x5,x4 // a[1]*a[0] umulh x9,x5,x4 mul x16,x6,x4 // a[2]*a[0] umulh x10,x6,x4 mul x17,x7,x4 // a[3]*a[0] umulh x19,x7,x4 adds x16,x16,x9 // accumulate high parts of multiplication mul x8,x6,x5 // a[2]*a[1] umulh x9,x6,x5 adcs x17,x17,x10 mul x10,x7,x5 // a[3]*a[1] umulh x11,x7,x5 adc x19,x19,xzr // can't overflow mul x20,x7,x6 // a[3]*a[2] umulh x1,x7,x6 adds x9,x9,x10 // accumulate high parts of multiplication mul x14,x4,x4 // a[0]*a[0] adc x10,x11,xzr // can't overflow adds x17,x17,x8 // accumulate low parts of multiplication umulh x4,x4,x4 adcs x19,x19,x9 mul x9,x5,x5 // a[1]*a[1] adcs x20,x20,x10 umulh x5,x5,x5 adc x1,x1,xzr // can't overflow adds x15,x15,x15 // acc[1-6]*=2 mul x10,x6,x6 // a[2]*a[2] adcs x16,x16,x16 umulh x6,x6,x6 adcs x17,x17,x17 mul x11,x7,x7 // a[3]*a[3] adcs x19,x19,x19 umulh x7,x7,x7 adcs x20,x20,x20 adcs x1,x1,x1 adc x2,xzr,xzr adds x15,x15,x4 // +a[i]*a[i] adcs x16,x16,x9 adcs x17,x17,x5 adcs x19,x19,x10 adcs x20,x20,x6 lsl x8,x14,#32 adcs x1,x1,x11 lsr x9,x14,#32 adc x2,x2,x7 subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 adcs 
x16,x17,x10 // +=acc[0]*0xffff0001 adc x17,x11,xzr // can't overflow adds x14,x14,x19 // accumulate upper half adcs x15,x15,x20 adcs x16,x16,x1 adcs x17,x17,x2 adc x19,xzr,xzr adds x8,x14,#1 // subs x8,x14,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x19,xzr // did it borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret .size __ecp_nistz256_sqr_mont,.-__ecp_nistz256_sqr_mont // Note that __ecp_nistz256_add_to expects both input vectors pre-loaded to // x4-x7 and x8-x11. This is done because it's used in multiple // contexts, e.g. in multiplication by 2 and 3... .type __ecp_nistz256_add_to,%function .align 4 __ecp_nistz256_add_to: adds x14,x14,x8 // ret = a+b adcs x15,x15,x9 adcs x16,x16,x10 adcs x17,x17,x11 adc x1,xzr,xzr // zap x1 adds x8,x14,#1 // subs x8,x4,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x1,xzr // did subtraction borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret .size __ecp_nistz256_add_to,.-__ecp_nistz256_add_to .type __ecp_nistz256_sub_from,%function .align 4 __ecp_nistz256_sub_from: ldp x8,x9,[x2] ldp x10,x11,[x2,#16] subs x14,x14,x8 // ret = a-b sbcs x15,x15,x9 sbcs x16,x16,x10 sbcs x17,x17,x11 sbc x1,xzr,xzr // zap x1 subs x8,x14,#1 // adds x8,x4,#-1 // tmp = ret+modulus adcs x9,x15,x12 adcs x10,x16,xzr adc x11,x17,x13 cmp x1,xzr // did subtraction borrow? csel x14,x14,x8,eq // ret = borrow ? ret+modulus : ret csel x15,x15,x9,eq csel x16,x16,x10,eq stp x14,x15,[x0] csel x17,x17,x11,eq stp x16,x17,[x0,#16] ret .size __ecp_nistz256_sub_from,.-__ecp_nistz256_sub_from .type __ecp_nistz256_sub_morf,%function .align 4 __ecp_nistz256_sub_morf: ldp x8,x9,[x2] ldp x10,x11,[x2,#16] subs x14,x8,x14 // ret = b-a sbcs x15,x9,x15 sbcs x16,x10,x16 sbcs x17,x11,x17 sbc x1,xzr,xzr // zap x1 subs x8,x14,#1 // adds x8,x4,#-1 // tmp = ret+modulus adcs x9,x15,x12 adcs x10,x16,xzr adc x11,x17,x13 cmp x1,xzr // did subtraction borrow? csel x14,x14,x8,eq // ret = borrow ? ret+modulus : ret csel x15,x15,x9,eq csel x16,x16,x10,eq stp x14,x15,[x0] csel x17,x17,x11,eq stp x16,x17,[x0,#16] ret .size __ecp_nistz256_sub_morf,.-__ecp_nistz256_sub_morf .type __ecp_nistz256_div_by_2,%function .align 4 __ecp_nistz256_div_by_2: subs x8,x14,#1 // adds x8,x4,#-1 // tmp = a+modulus adcs x9,x15,x12 adcs x10,x16,xzr adcs x11,x17,x13 adc x1,xzr,xzr // zap x1 tst x14,#1 // is a even? csel x14,x14,x8,eq // ret = even ? a : a+modulus csel x15,x15,x9,eq csel x16,x16,x10,eq csel x17,x17,x11,eq csel x1,xzr,x1,eq lsr x14,x14,#1 // ret >>= 1 orr x14,x14,x15,lsl#63 lsr x15,x15,#1 orr x15,x15,x16,lsl#63 lsr x16,x16,#1 orr x16,x16,x17,lsl#63 lsr x17,x17,#1 stp x14,x15,[x0] orr x17,x17,x1,lsl#63 stp x16,x17,[x0,#16] ret .size __ecp_nistz256_div_by_2,.-__ecp_nistz256_div_by_2 .globl ecp_nistz256_point_double .hidden ecp_nistz256_point_double .type ecp_nistz256_point_double,%function .align 5 ecp_nistz256_point_double: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] sub sp,sp,#32*4 .Ldouble_shortcut: ldp x14,x15,[x1,#32] mov x21,x0 ldp x16,x17,[x1,#48] mov x22,x1 adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] mov x8,x14 ldr x13,[x13,#24] mov x9,x15 ldp x4,x5,[x22,#64] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[x22,#64+16] add x0,sp,#0 bl __ecp_nistz256_add_to // p256_mul_by_2(S, in_y); add x0,sp,#64 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Zsqr, in_z); ldp x8,x9,[x22] ldp x10,x11,[x22,#16] mov x4,x14 // put Zsqr aside for p256_sub mov x5,x15 mov x6,x16 mov x7,x17 add x0,sp,#32 bl __ecp_nistz256_add_to // p256_add(M, Zsqr, in_x); add x2,x22,#0 mov x14,x4 // restore Zsqr mov x15,x5 ldp x4,x5,[sp,#0] // forward load for p256_sqr_mont mov x16,x6 mov x17,x7 ldp x6,x7,[sp,#0+16] add x0,sp,#64 bl __ecp_nistz256_sub_morf // p256_sub(Zsqr, in_x, Zsqr); add x0,sp,#0 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(S, S); ldr x3,[x22,#32] ldp x4,x5,[x22,#64] ldp x6,x7,[x22,#64+16] add x2,x22,#32 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(tmp0, in_z, in_y); mov x8,x14 mov x9,x15 ldp x4,x5,[sp,#0] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[sp,#0+16] add x0,x21,#64 bl __ecp_nistz256_add_to // p256_mul_by_2(res_z, tmp0); add x0,sp,#96 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(tmp0, S); ldr x3,[sp,#64] // forward load for p256_mul_mont ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x0,x21,#32 bl __ecp_nistz256_div_by_2 // p256_div_by_2(res_y, tmp0); add x2,sp,#64 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(M, M, Zsqr); mov x8,x14 // duplicate M mov x9,x15 mov x10,x16 mov x11,x17 mov x4,x14 // put M aside mov x5,x15 mov x6,x16 mov x7,x17 add x0,sp,#32 bl __ecp_nistz256_add_to mov x8,x4 // restore M mov x9,x5 ldr x3,[x22] // forward load for p256_mul_mont mov x10,x6 ldp x4,x5,[sp,#0] mov x11,x7 ldp x6,x7,[sp,#0+16] bl __ecp_nistz256_add_to // p256_mul_by_3(M, M); add x2,x22,#0 add x0,sp,#0 bl __ecp_nistz256_mul_mont // p256_mul_mont(S, S, in_x); mov x8,x14 mov x9,x15 ldp x4,x5,[sp,#32] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[sp,#32+16] add x0,sp,#96 bl __ecp_nistz256_add_to // p256_mul_by_2(tmp0, S); add x0,x21,#0 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(res_x, M); add x2,sp,#96 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, tmp0); add x2,sp,#0 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(S, S, res_x); ldr x3,[sp,#32] mov x4,x14 // copy S mov x5,x15 mov x6,x16 mov x7,x17 add x2,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(S, S, M); add x2,x21,#32 add x0,x21,#32 bl __ecp_nistz256_sub_from // p256_sub(res_y, S, res_y); add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_point_double,.-ecp_nistz256_point_double .globl ecp_nistz256_point_add .hidden ecp_nistz256_point_add .type ecp_nistz256_point_add,%function .align 5 ecp_nistz256_point_add: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#32*12 ldp x4,x5,[x2,#64] // in2_z ldp x6,x7,[x2,#64+16] mov x21,x0 mov x22,x1 mov x23,x2 adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] orr x8,x4,x5 orr x10,x6,x7 orr x25,x8,x10 cmp x25,#0 csetm x25,ne // ~in2infty add x0,sp,#192 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z2sqr, in2_z); ldp x4,x5,[x22,#64] // in1_z ldp x6,x7,[x22,#64+16] orr x8,x4,x5 orr x10,x6,x7 orr x24,x8,x10 cmp x24,#0 csetm x24,ne // ~in1infty add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z); ldr x3,[x23,#64] ldp x4,x5,[sp,#192] ldp x6,x7,[sp,#192+16] add x2,x23,#64 add x0,sp,#320 bl __ecp_nistz256_mul_mont // p256_mul_mont(S1, Z2sqr, in2_z); ldr x3,[x22,#64] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,x22,#64 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, Z1sqr, in1_z); ldr x3,[x22,#32] ldp x4,x5,[sp,#320] ldp x6,x7,[sp,#320+16] add x2,x22,#32 add x0,sp,#320 bl __ecp_nistz256_mul_mont // p256_mul_mont(S1, S1, in1_y); ldr x3,[x23,#32] ldp x4,x5,[sp,#352] ldp x6,x7,[sp,#352+16] add x2,x23,#32 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S2, in2_y); add x2,sp,#320 ldr x3,[sp,#192] // forward load for p256_mul_mont ldp x4,x5,[x22] ldp x6,x7,[x22,#16] add x0,sp,#160 bl __ecp_nistz256_sub_from // p256_sub(R, S2, S1); orr x14,x14,x15 // see if result is zero orr x16,x16,x17 orr x26,x14,x16 // ~is_equal(S1,S2) add x2,sp,#192 add x0,sp,#256 bl __ecp_nistz256_mul_mont // p256_mul_mont(U1, in1_x, Z2sqr); ldr x3,[sp,#128] ldp x4,x5,[x23] ldp x6,x7,[x23,#16] add x2,sp,#128 add x0,sp,#288 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, in2_x, Z1sqr); add x2,sp,#256 ldp x4,x5,[sp,#160] // forward load for p256_sqr_mont ldp x6,x7,[sp,#160+16] add x0,sp,#96 bl __ecp_nistz256_sub_from // p256_sub(H, U2, U1); orr x14,x14,x15 // see if result is zero orr x16,x16,x17 orr x14,x14,x16 // ~is_equal(U1,U2) mvn x27,x24 // -1/0 -> 0/-1 mvn x28,x25 // -1/0 -> 0/-1 orr x14,x14,x27 orr x14,x14,x28 orr x14,x14,x26 cbnz x14,.Ladd_proceed // if(~is_equal(U1,U2) | in1infty | in2infty | ~is_equal(S1,S2)) .Ladd_double: mov x1,x22 mov x0,x21 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] add sp,sp,#256 // #256 is from #32*(12-4). 
difference in stack frames b .Ldouble_shortcut .align 4 .Ladd_proceed: add x0,sp,#192 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Rsqr, R); ldr x3,[x22,#64] ldp x4,x5,[sp,#96] ldp x6,x7,[sp,#96+16] add x2,x22,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, H, in1_z); ldp x4,x5,[sp,#96] ldp x6,x7,[sp,#96+16] add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Hsqr, H); ldr x3,[x23,#64] ldp x4,x5,[sp,#64] ldp x6,x7,[sp,#64+16] add x2,x23,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, res_z, in2_z); ldr x3,[sp,#96] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,sp,#96 add x0,sp,#224 bl __ecp_nistz256_mul_mont // p256_mul_mont(Hcub, Hsqr, H); ldr x3,[sp,#128] ldp x4,x5,[sp,#256] ldp x6,x7,[sp,#256+16] add x2,sp,#128 add x0,sp,#288 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, U1, Hsqr); mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 add x0,sp,#128 bl __ecp_nistz256_add_to // p256_mul_by_2(Hsqr, U2); add x2,sp,#192 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(res_x, Rsqr, Hsqr); add x2,sp,#224 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, Hcub); add x2,sp,#288 ldr x3,[sp,#224] // forward load for p256_mul_mont ldp x4,x5,[sp,#320] ldp x6,x7,[sp,#320+16] add x0,sp,#32 bl __ecp_nistz256_sub_morf // p256_sub(res_y, U2, res_x); add x2,sp,#224 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S1, Hcub); ldr x3,[sp,#160] ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x2,sp,#160 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_y, res_y, R); add x2,sp,#352 bl __ecp_nistz256_sub_from // p256_sub(res_y, res_y, S2); ldp x4,x5,[sp,#0] // res ldp x6,x7,[sp,#0+16] ldp x8,x9,[x23] // in2 ldp x10,x11,[x23,#16] ldp x14,x15,[x22,#0] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#0+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+0+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+0+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#0+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#0+48] stp x14,x15,[x21,#0] stp x16,x17,[x21,#0+16] ldp x14,x15,[x22,#32] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#32+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+32+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+32+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#32+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#32+48] stp x14,x15,[x21,#32] stp x16,x17,[x21,#32+16] ldp x14,x15,[x22,#64] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#64+16] csel x8,x4,x8,ne csel x9,x5,x9,ne csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? csel x14,x8,x14,ne csel x15,x9,x15,ne csel x16,x10,x16,ne csel x17,x11,x17,ne stp x14,x15,[x21,#64] stp x16,x17,[x21,#64+16] .Ladd_done: add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_point_add,.-ecp_nistz256_point_add .globl ecp_nistz256_point_add_affine .hidden ecp_nistz256_point_add_affine .type ecp_nistz256_point_add_affine,%function .align 5 ecp_nistz256_point_add_affine: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] sub sp,sp,#32*10 mov x21,x0 mov x22,x1 mov x23,x2 adrp x13,.Lpoly add x13,x13,:lo12:.Lpoly ldr x12,[x13,#8] ldr x13,[x13,#24] ldp x4,x5,[x1,#64] // in1_z ldp x6,x7,[x1,#64+16] orr x8,x4,x5 orr x10,x6,x7 orr x24,x8,x10 cmp x24,#0 csetm x24,ne // ~in1infty ldp x14,x15,[x2] // in2_x ldp x16,x17,[x2,#16] ldp x8,x9,[x2,#32] // in2_y ldp x10,x11,[x2,#48] orr x14,x14,x15 orr x16,x16,x17 orr x8,x8,x9 orr x10,x10,x11 orr x14,x14,x16 orr x8,x8,x10 orr x25,x14,x8 cmp x25,#0 csetm x25,ne // ~in2infty add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z); mov x4,x14 mov x5,x15 mov x6,x16 mov x7,x17 ldr x3,[x23] add x2,x23,#0 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, Z1sqr, in2_x); add x2,x22,#0 ldr x3,[x22,#64] // forward load for p256_mul_mont ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x0,sp,#160 bl __ecp_nistz256_sub_from // p256_sub(H, U2, in1_x); add x2,x22,#64 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, Z1sqr, in1_z); ldr x3,[x22,#64] ldp x4,x5,[sp,#160] ldp x6,x7,[sp,#160+16] add x2,x22,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, H, in1_z); ldr x3,[x23,#32] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,x23,#32 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S2, in2_y); add x2,x22,#32 ldp x4,x5,[sp,#160] // forward load for p256_sqr_mont ldp x6,x7,[sp,#160+16] add x0,sp,#192 bl __ecp_nistz256_sub_from // p256_sub(R, S2, in1_y); add x0,sp,#224 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Hsqr, H); ldp x4,x5,[sp,#192] ldp x6,x7,[sp,#192+16] add x0,sp,#288 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Rsqr, R); ldr x3,[sp,#160] ldp x4,x5,[sp,#224] ldp x6,x7,[sp,#224+16] add x2,sp,#160 add x0,sp,#256 bl __ecp_nistz256_mul_mont // p256_mul_mont(Hcub, Hsqr, H); ldr x3,[x22] ldp x4,x5,[sp,#224] ldp x6,x7,[sp,#224+16] add x2,x22,#0 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, in1_x, Hsqr); mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 add x0,sp,#224 bl __ecp_nistz256_add_to // p256_mul_by_2(Hsqr, U2); add x2,sp,#288 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(res_x, Rsqr, Hsqr); add x2,sp,#256 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, Hcub); add x2,sp,#96 ldr x3,[x22,#32] // forward load for p256_mul_mont ldp x4,x5,[sp,#256] ldp x6,x7,[sp,#256+16] add x0,sp,#32 bl __ecp_nistz256_sub_morf // p256_sub(res_y, U2, res_x); add x2,x22,#32 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, in1_y, Hcub); ldr x3,[sp,#192] ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x2,sp,#192 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_y, res_y, R); add x2,sp,#128 bl __ecp_nistz256_sub_from // p256_sub(res_y, res_y, S2); ldp x4,x5,[sp,#0] // res ldp x6,x7,[sp,#0+16] ldp x8,x9,[x23] // in2 ldp x10,x11,[x23,#16] ldp x14,x15,[x22,#0] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#0+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+0+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+0+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#0+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#0+48] stp x14,x15,[x21,#0] stp x16,x17,[x21,#0+16] adrp x23,.Lone_mont-64 add x23,x23,:lo12:.Lone_mont-64 ldp x14,x15,[x22,#32] // in1 cmp x24,#0 // ~, remember? 
ldp x16,x17,[x22,#32+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+32+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+32+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#32+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#32+48] stp x14,x15,[x21,#32] stp x16,x17,[x21,#32+16] ldp x14,x15,[x22,#64] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#64+16] csel x8,x4,x8,ne csel x9,x5,x9,ne csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? csel x14,x8,x14,ne csel x15,x9,x15,ne csel x16,x10,x16,ne csel x17,x11,x17,ne stp x14,x15,[x21,#64] stp x16,x17,[x21,#64+16] add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x29,x30,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret .size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_ord_mul_mont(uint64_t res[4], uint64_t a[4], // uint64_t b[4]); .globl ecp_nistz256_ord_mul_mont .hidden ecp_nistz256_ord_mul_mont .type ecp_nistz256_ord_mul_mont,%function .align 4 ecp_nistz256_ord_mul_mont: AARCH64_VALID_CALL_TARGET // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] adrp x23,.Lord add x23,x23,:lo12:.Lord ldr x3,[x2] // bp[0] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] ldp x12,x13,[x23,#0] ldp x21,x22,[x23,#16] ldr x23,[x23,#32] mul x14,x4,x3 // a[0]*b[0] umulh x8,x4,x3 mul x15,x5,x3 // a[1]*b[0] umulh x9,x5,x3 mul x16,x6,x3 // a[2]*b[0] umulh x10,x6,x3 mul x17,x7,x3 // a[3]*b[0] umulh x19,x7,x3 mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts of multiplication adcs x16,x16,x9 adcs x17,x17,x10 adc x19,x19,xzr mov x20,xzr ldr x3,[x2,#8*1] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr ldr x3,[x2,#8*2] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr ldr x3,[x2,#8*3] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // 
accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr lsl x8,x24,#32 // last reduction subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr subs x8,x14,x12 // ret -= modulus sbcs x9,x15,x13 sbcs x10,x16,x21 sbcs x11,x17,x22 sbcs xzr,x19,xzr csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldr x29,[sp],#64 ret .size ecp_nistz256_ord_mul_mont,.-ecp_nistz256_ord_mul_mont //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_ord_sqr_mont(uint64_t res[4], uint64_t a[4], // uint64_t rep); .globl ecp_nistz256_ord_sqr_mont .hidden ecp_nistz256_ord_sqr_mont .type ecp_nistz256_ord_sqr_mont,%function .align 4 ecp_nistz256_ord_sqr_mont: AARCH64_VALID_CALL_TARGET // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] adrp x23,.Lord add x23,x23,:lo12:.Lord ldp x4,x5,[x1] ldp x6,x7,[x1,#16] ldp x12,x13,[x23,#0] ldp x21,x22,[x23,#16] ldr x23,[x23,#32] b .Loop_ord_sqr .align 4 .Loop_ord_sqr: sub x2,x2,#1 //////////////////////////////////////////////////////////////// // | | | | | |a1*a0| | // | | | | |a2*a0| | | // | |a3*a2|a3*a0| | | | // | | | |a2*a1| | | | // | | |a3*a1| | | | | // *| | | | | | | | 2| // +|a3*a3|a2*a2|a1*a1|a0*a0| // |--+--+--+--+--+--+--+--| // |A7|A6|A5|A4|A3|A2|A1|A0|, where Ax is , i.e. follow // // "can't overflow" below mark carrying into high part of // multiplication result, which can't overflow, because it // can never be all ones. 
mul x15,x5,x4 // a[1]*a[0] umulh x9,x5,x4 mul x16,x6,x4 // a[2]*a[0] umulh x10,x6,x4 mul x17,x7,x4 // a[3]*a[0] umulh x19,x7,x4 adds x16,x16,x9 // accumulate high parts of multiplication mul x8,x6,x5 // a[2]*a[1] umulh x9,x6,x5 adcs x17,x17,x10 mul x10,x7,x5 // a[3]*a[1] umulh x11,x7,x5 adc x19,x19,xzr // can't overflow mul x20,x7,x6 // a[3]*a[2] umulh x1,x7,x6 adds x9,x9,x10 // accumulate high parts of multiplication mul x14,x4,x4 // a[0]*a[0] adc x10,x11,xzr // can't overflow adds x17,x17,x8 // accumulate low parts of multiplication umulh x4,x4,x4 adcs x19,x19,x9 mul x9,x5,x5 // a[1]*a[1] adcs x20,x20,x10 umulh x5,x5,x5 adc x1,x1,xzr // can't overflow adds x15,x15,x15 // acc[1-6]*=2 mul x10,x6,x6 // a[2]*a[2] adcs x16,x16,x16 umulh x6,x6,x6 adcs x17,x17,x17 mul x11,x7,x7 // a[3]*a[3] adcs x19,x19,x19 umulh x7,x7,x7 adcs x20,x20,x20 adcs x1,x1,x1 adc x3,xzr,xzr adds x15,x15,x4 // +a[i]*a[i] mul x24,x14,x23 adcs x16,x16,x9 adcs x17,x17,x5 adcs x19,x19,x10 adcs x20,x20,x6 adcs x1,x1,x11 adc x3,x3,x7 subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adc x17,xzr,x24 // can't overflow mul x11,x14,x23 lsl x8,x24,#32 subs x15,x15,x24 lsr x9,x24,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x11 mul x10,x13,x11 umulh x24,x13,x11 adcs x10,x10,x9 adc x24,x24,xzr adds x14,x15,x10 adcs x15,x16,x24 adcs x16,x17,x11 adc x17,xzr,x11 // can't overflow mul x24,x14,x23 lsl x8,x11,#32 subs x15,x15,x11 lsr x9,x11,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adc x17,xzr,x24 // can't overflow mul x11,x14,x23 lsl x8,x24,#32 subs x15,x15,x24 lsr x9,x24,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x11 mul x10,x13,x11 umulh x24,x13,x11 adcs x10,x10,x9 adc x24,x24,xzr adds x14,x15,x10 adcs x15,x16,x24 adcs x16,x17,x11 adc x17,xzr,x11 // can't overflow lsl x8,x11,#32 subs x15,x15,x11 lsr x9,x11,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow adds x14,x14,x19 // accumulate upper half adcs x15,x15,x20 adcs x16,x16,x1 adcs x17,x17,x3 adc x19,xzr,xzr subs x8,x14,x12 // ret -= modulus sbcs x9,x15,x13 sbcs x10,x16,x21 sbcs x11,x17,x22 sbcs xzr,x19,xzr csel x4,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x5,x15,x9,lo csel x6,x16,x10,lo csel x7,x17,x11,lo cbnz x2,.Loop_ord_sqr stp x4,x5,[x0] stp x6,x7,[x0,#16] ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldr x29,[sp],#64 ret .size ecp_nistz256_ord_sqr_mont,.-ecp_nistz256_ord_sqr_mont //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_select_w5(uint64_t *val, uint64_t *in_t, int index); .globl ecp_nistz256_select_w5 .hidden ecp_nistz256_select_w5 .type ecp_nistz256_select_w5,%function .align 4 ecp_nistz256_select_w5: AARCH64_VALID_CALL_TARGET // x10 := x0 // w9 := 0; loop counter and incremented internal index mov x10, x0 mov w9, #0 // [v16-v21] := 0 movi v16.16b, #0 movi v17.16b, #0 movi v18.16b, #0 movi v19.16b, #0 movi v20.16b, #0 movi v21.16b, #0 .Lselect_w5_loop: // Loop 16 times. // Increment index (loop counter); tested at the end of the loop add w9, w9, #1 // [v22-v27] := Load a (3*256-bit = 6*128-bit) table entry starting at x1 // and advance x1 to point to the next entry ld1 {v22.2d, v23.2d, v24.2d, v25.2d}, [x1],#64 // x11 := (w9 == w2)? 
All 1s : All 0s cmp w9, w2 csetm x11, eq // continue loading ... ld1 {v26.2d, v27.2d}, [x1],#32 // duplicate mask_64 into Mask (all 0s or all 1s) dup v3.2d, x11 // [v16-v19] := (Mask == all 1s)? [v22-v25] : [v16-v19] // i.e., values in output registers will remain the same if w9 != w2 bit v16.16b, v22.16b, v3.16b bit v17.16b, v23.16b, v3.16b bit v18.16b, v24.16b, v3.16b bit v19.16b, v25.16b, v3.16b bit v20.16b, v26.16b, v3.16b bit v21.16b, v27.16b, v3.16b // If bit #4 is not 0 (i.e. idx_ctr < 16) loop back tbz w9, #4, .Lselect_w5_loop // Write [v16-v21] to memory at the output pointer st1 {v16.2d, v17.2d, v18.2d, v19.2d}, [x10],#64 st1 {v20.2d, v21.2d}, [x10] ret .size ecp_nistz256_select_w5,.-ecp_nistz256_select_w5 //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_select_w7(uint64_t *val, uint64_t *in_t, int index); .globl ecp_nistz256_select_w7 .hidden ecp_nistz256_select_w7 .type ecp_nistz256_select_w7,%function .align 4 ecp_nistz256_select_w7: AARCH64_VALID_CALL_TARGET // w9 := 0; loop counter and incremented internal index mov w9, #0 // [v16-v21] := 0 movi v16.16b, #0 movi v17.16b, #0 movi v18.16b, #0 movi v19.16b, #0 .Lselect_w7_loop: // Loop 64 times. // Increment index (loop counter); tested at the end of the loop add w9, w9, #1 // [v22-v25] := Load a (2*256-bit = 4*128-bit) table entry starting at x1 // and advance x1 to point to the next entry ld1 {v22.2d, v23.2d, v24.2d, v25.2d}, [x1],#64 // x11 := (w9 == w2)? All 1s : All 0s cmp w9, w2 csetm x11, eq // duplicate mask_64 into Mask (all 0s or all 1s) dup v3.2d, x11 // [v16-v19] := (Mask == all 1s)? [v22-v25] : [v16-v19] // i.e., values in output registers will remain the same if w9 != w2 bit v16.16b, v22.16b, v3.16b bit v17.16b, v23.16b, v3.16b bit v18.16b, v24.16b, v3.16b bit v19.16b, v25.16b, v3.16b // If bit #6 is not 0 (i.e. idx_ctr < 64) loop back tbz w9, #6, .Lselect_w7_loop // Write [v16-v19] to memory at the output pointer st1 {v16.2d, v17.2d, v18.2d, v19.2d}, [x0] ret .size ecp_nistz256_select_w7,.-ecp_nistz256_select_w7 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
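
ecp_nistz256_select_w5 and ecp_nistz256_select_w7 above perform a constant-time table lookup: every entry is read, and csetm plus NEON bit-select (bit) masks in only the entry whose position matches the requested index, so neither the memory access pattern nor the instruction flow depends on the secret index. A minimal C sketch of the same idea follows; the names select_w7 and p256_affine_point are hypothetical, entries stand in for the 2x256-bit affine points of the w7 table, index runs 1..64 and 0 yields all zeros, and the NEON register layout is not reproduced.

#include <stdint.h>
#include <stddef.h>

typedef struct { uint64_t limbs[8]; } p256_affine_point;   /* 2 x 256 bits */

/* Constant-time selection of table[index-1]; index == 0 returns zeros. */
static void select_w7(p256_affine_point *out,
                      const p256_affine_point table[64], uint32_t index) {
    p256_affine_point acc = {{0}};
    for (uint32_t i = 1; i <= 64; i++) {
        /* mask is all-ones iff i == index, built without a branch */
        uint64_t diff = (uint64_t)(i ^ index);
        uint64_t mask = ((diff | (0 - diff)) >> 63) - 1;
        for (size_t j = 0; j < 8; j++)
            acc.limbs[j] |= table[i - 1].limbs[j] & mask;
    }
    *out = acc;
}

A C compiler is free to reintroduce branches or data-dependent behavior here, which is one reason the selection is written directly in assembly with cmp/csetm and NEON bit operations.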
marvin-hansen/iggy-streaming-system
45,307
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/md5-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) .text .globl md5_block_asm_data_order .hidden md5_block_asm_data_order .type md5_block_asm_data_order,@function md5_block_asm_data_order: // Save all callee-saved registers stp x19,x20,[sp,#-80]! stp x21,x22,[sp,#16] stp x23,x24,[sp,#32] stp x25,x26,[sp,#48] stp x27,x28,[sp,#64] ldp w10, w11, [x0, #0] // .Load MD5 state->A and state->B ldp w12, w13, [x0, #8] // .Load MD5 state->C and state->D .align 5 md5_blocks_loop: eor x17, x12, x13 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) and x16, x17, x11 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) ldp x15, x3, [x1] // .Load 4 words of input data0 M[0]/0 eor x14, x16, x13 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0xa478 // .Load lower half of constant 0xd76aa478 movk x9, #0xd76a, lsl #16 // .Load upper half of constant 0xd76aa478 add w8, w10, w15 // Add dest value add w7, w8, w9 // Add constant 0xd76aa478 add w6, w7, w14 // Add aux function result ror w6, w6, #25 // Rotate left s=7 bits eor x5, x11, x12 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w11, w6 // Add X parameter round 1 A=FF(A, B, C, D, 0xd76aa478, s=7, M[0]) and x8, x5, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x17, x8, x12 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0xb756 // .Load lower half of constant 0xe8c7b756 movk x16, #0xe8c7, lsl #16 // .Load upper half of constant 0xe8c7b756 lsr x20, x15, #32 // Right shift high input value containing M[1] add w9, w13, w20 // Add dest value add w7, w9, w16 // Add constant 0xe8c7b756 add w14, w7, w17 // Add aux function result ror w14, w14, #20 // Rotate left s=12 bits eor x6, x4, x11 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w5, w4, w14 // Add X parameter round 1 D=FF(D, A, B, C, 0xe8c7b756, s=12, M[1]) and x8, x6, x5 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x8, x11 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0x70db // .Load lower half of constant 0x242070db movk x16, #0x2420, lsl #16 // .Load upper half of constant 0x242070db add w7, w12, w3 // Add dest value add w17, w7, w16 // Add constant 0x242070db add w14, w17, w9 // Add aux function result ror w14, w14, #15 // Rotate left s=17 bits eor x6, x5, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w5, w14 // Add X parameter round 1 C=FF(C, D, A, B, 0x242070db, s=17, M[2]) and x7, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x16, x7, x4 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0xceee // .Load lower half of constant 0xc1bdceee movk x9, #0xc1bd, lsl #16 // .Load upper half of constant 0xc1bdceee lsr x21, x3, #32 // Right shift high input value containing M[3] add w14, w11, w21 // Add dest value add w6, w14, w9 // Add constant 0xc1bdceee add w7, w6, w16 // Add aux function result ror w7, w7, #10 // Rotate left s=22 bits eor x17, x8, x5 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w9, w8, w7 // Add X parameter round 1 B=FF(B, C, D, A, 0xc1bdceee, s=22, M[3]) ldp x14, x7, [x1, #16] // .Load 4 words of input data0 M[4]/0w and x16, x17, x9 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x16, x5 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0xfaf // .Load lower half of constant 0xf57c0faf movk x16, #0xf57c, lsl #16 // .Load upper half 
of constant 0xf57c0faf add w17, w4, w14 // Add dest value add w16, w17, w16 // Add constant 0xf57c0faf add w4, w16, w6 // Add aux function result ror w4, w4, #25 // Rotate left s=7 bits eor x16, x9, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w9, w4 // Add X parameter round 1 A=FF(A, B, C, D, 0xf57c0faf, s=7, M[4]) and x16, x16, x17 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x16, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x4, #0xc62a // .Load lower half of constant 0x4787c62a movk x4, #0x4787, lsl #16 // .Load upper half of constant 0x4787c62a lsr x22, x14, #32 // Right shift high input value containing M[5] add w16, w5, w22 // Add dest value add w16, w16, w4 // Add constant 0x4787c62a add w5, w16, w6 // Add aux function result ror w5, w5, #20 // Rotate left s=12 bits eor x4, x17, x9 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w19, w17, w5 // Add X parameter round 1 D=FF(D, A, B, C, 0x4787c62a, s=12, M[5]) and x6, x4, x19 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x5, x6, x9 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x4, #0x4613 // .Load lower half of constant 0xa8304613 movk x4, #0xa830, lsl #16 // .Load upper half of constant 0xa8304613 add w6, w8, w7 // Add dest value add w8, w6, w4 // Add constant 0xa8304613 add w4, w8, w5 // Add aux function result ror w4, w4, #15 // Rotate left s=17 bits eor x6, x19, x17 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w19, w4 // Add X parameter round 1 C=FF(C, D, A, B, 0xa8304613, s=17, M[6]) and x5, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x4, x5, x17 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x6, #0x9501 // .Load lower half of constant 0xfd469501 movk x6, #0xfd46, lsl #16 // .Load upper half of constant 0xfd469501 lsr x23, x7, #32 // Right shift high input value containing M[7] add w9, w9, w23 // Add dest value add w5, w9, w6 // Add constant 0xfd469501 add w9, w5, w4 // Add aux function result ror w9, w9, #10 // Rotate left s=22 bits eor x6, x8, x19 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0xfd469501, s=22, M[7]) ldp x5, x16, [x1, #32] // .Load 4 words of input data0 M[8]/0 and x9, x6, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x9, x19 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0x98d8 // .Load lower half of constant 0x698098d8 movk x9, #0x6980, lsl #16 // .Load upper half of constant 0x698098d8 add w17, w17, w5 // Add dest value add w9, w17, w9 // Add constant 0x698098d8 add w17, w9, w6 // Add aux function result ror w17, w17, #25 // Rotate left s=7 bits eor x9, x4, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w6, w4, w17 // Add X parameter round 1 A=FF(A, B, C, D, 0x698098d8, s=7, M[8]) and x17, x9, x6 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x17, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x17, #0xf7af // .Load lower half of constant 0x8b44f7af movk x17, #0x8b44, lsl #16 // .Load upper half of constant 0x8b44f7af lsr x24, x5, #32 // Right shift high input value containing M[9] add w19, w19, w24 // Add dest value add w17, w19, w17 // Add constant 0x8b44f7af add w19, w17, w9 // Add aux function result ror w19, w19, #20 // Rotate left s=12 bits eor x9, x6, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w6, w19 // Add X parameter round 1 D=FF(D, A, B, C, 0x8b44f7af, s=12, M[9]) and x9, x9, x17 // Continue aux function 
round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x9, x4 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x11, #0x5bb1 // .Load lower half of constant 0xffff5bb1 movk x11, #0xffff, lsl #16 // .Load upper half of constant 0xffff5bb1 add w8, w8, w16 // Add dest value add w8, w8, w11 // Add constant 0xffff5bb1 add w8, w8, w9 // Add aux function result ror w8, w8, #15 // Rotate left s=17 bits eor x9, x17, x6 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w17, w8 // Add X parameter round 1 C=FF(C, D, A, B, 0xffff5bb1, s=17, M[10]) and x9, x9, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x9, x6 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x11, #0xd7be // .Load lower half of constant 0x895cd7be movk x11, #0x895c, lsl #16 // .Load upper half of constant 0x895cd7be lsr x25, x16, #32 // Right shift high input value containing M[11] add w4, w4, w25 // Add dest value add w4, w4, w11 // Add constant 0x895cd7be add w9, w4, w9 // Add aux function result ror w9, w9, #10 // Rotate left s=22 bits eor x4, x8, x17 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w9, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0x895cd7be, s=22, M[11]) ldp x11, x12, [x1, #48] // .Load 4 words of input data0 M[12]/0 and x4, x4, x9 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x4, x4, x17 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x19, #0x1122 // .Load lower half of constant 0x6b901122 movk x19, #0x6b90, lsl #16 // .Load upper half of constant 0x6b901122 add w6, w6, w11 // Add dest value add w6, w6, w19 // Add constant 0x6b901122 add w4, w6, w4 // Add aux function result ror w4, w4, #25 // Rotate left s=7 bits eor x6, x9, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w9, w4 // Add X parameter round 1 A=FF(A, B, C, D, 0x6b901122, s=7, M[12]) and x6, x6, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x19, #0x7193 // .Load lower half of constant 0xfd987193 movk x19, #0xfd98, lsl #16 // .Load upper half of constant 0xfd987193 lsr x26, x11, #32 // Right shift high input value containing M[13] add w17, w17, w26 // Add dest value add w17, w17, w19 // Add constant 0xfd987193 add w17, w17, w6 // Add aux function result ror w17, w17, #20 // Rotate left s=12 bits eor x6, x4, x9 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w4, w17 // Add X parameter round 1 D=FF(D, A, B, C, 0xfd987193, s=12, M[13]) and x6, x6, x17 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x9 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x13, #0x438e // .Load lower half of constant 0xa679438e movk x13, #0xa679, lsl #16 // .Load upper half of constant 0xa679438e add w8, w8, w12 // Add dest value add w8, w8, w13 // Add constant 0xa679438e add w8, w8, w6 // Add aux function result ror w8, w8, #15 // Rotate left s=17 bits eor x6, x17, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w17, w8 // Add X parameter round 1 C=FF(C, D, A, B, 0xa679438e, s=17, M[14]) and x6, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x4 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x13, #0x821 // .Load lower half of constant 0x49b40821 movk x13, #0x49b4, lsl #16 // .Load upper half of constant 0x49b40821 lsr x27, x12, #32 // Right shift high input value containing M[15] add w9, w9, w27 // Add dest value add w9, w9, w13 // Add constant 0x49b40821 add w9, w9, w6 // Add aux function result ror w9, w9, #10 // Rotate 
left s=22 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0x49b40821, s=22, M[15]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x2562 // .Load lower half of constant 0xf61e2562 movk x13, #0xf61e, lsl #16 // .Load upper half of constant 0xf61e2562 add w4, w4, w20 // Add dest value add w4, w4, w13 // Add constant 0xf61e2562 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xf61e2562, s=5, M[1]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xb340 // .Load lower half of constant 0xc040b340 movk x13, #0xc040, lsl #16 // .Load upper half of constant 0xc040b340 add w17, w17, w7 // Add dest value add w17, w17, w13 // Add constant 0xc040b340 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xc040b340, s=9, M[6]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x5a51 // .Load lower half of constant 0x265e5a51 movk x13, #0x265e, lsl #16 // .Load upper half of constant 0x265e5a51 add w8, w8, w25 // Add dest value add w8, w8, w13 // Add constant 0x265e5a51 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0x265e5a51, s=14, M[11]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xc7aa // .Load lower half of constant 0xe9b6c7aa movk x13, #0xe9b6, lsl #16 // .Load upper half of constant 0xe9b6c7aa add w9, w9, w15 // Add dest value add w9, w9, w13 // Add constant 0xe9b6c7aa add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0xe9b6c7aa, s=20, M[0]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x105d // .Load lower half of constant 0xd62f105d movk x13, #0xd62f, lsl #16 // .Load upper half of constant 0xd62f105d add w4, w4, w22 // Add dest value add w4, w4, w13 // Add constant 0xd62f105d add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xd62f105d, s=5, M[5]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x1453 // .Load lower half of constant 0x2441453 movk x13, #0x244, lsl #16 // .Load upper half of constant 0x2441453 add w17, w17, w16 // Add dest value add w17, w17, w13 // Add constant 0x2441453 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 
D=GG(D, A, B, C, 0x2441453, s=9, M[10]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xe681 // .Load lower half of constant 0xd8a1e681 movk x13, #0xd8a1, lsl #16 // .Load upper half of constant 0xd8a1e681 add w8, w8, w27 // Add dest value add w8, w8, w13 // Add constant 0xd8a1e681 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0xd8a1e681, s=14, M[15]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xfbc8 // .Load lower half of constant 0xe7d3fbc8 movk x13, #0xe7d3, lsl #16 // .Load upper half of constant 0xe7d3fbc8 add w9, w9, w14 // Add dest value add w9, w9, w13 // Add constant 0xe7d3fbc8 add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0xe7d3fbc8, s=20, M[4]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xcde6 // .Load lower half of constant 0x21e1cde6 movk x13, #0x21e1, lsl #16 // .Load upper half of constant 0x21e1cde6 add w4, w4, w24 // Add dest value add w4, w4, w13 // Add constant 0x21e1cde6 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0x21e1cde6, s=5, M[9]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x7d6 // .Load lower half of constant 0xc33707d6 movk x13, #0xc337, lsl #16 // .Load upper half of constant 0xc33707d6 add w17, w17, w12 // Add dest value add w17, w17, w13 // Add constant 0xc33707d6 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xc33707d6, s=9, M[14]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xd87 // .Load lower half of constant 0xf4d50d87 movk x13, #0xf4d5, lsl #16 // .Load upper half of constant 0xf4d50d87 add w8, w8, w21 // Add dest value add w8, w8, w13 // Add constant 0xf4d50d87 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0xf4d50d87, s=14, M[3]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x14ed // .Load lower half of constant 0x455a14ed movk x13, #0x455a, lsl #16 // .Load upper half of constant 0x455a14ed add w9, w9, w5 // Add dest value add w9, w9, w13 // Add constant 0x455a14ed add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0x455a14ed, s=20, M[8]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // 
End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xe905 // .Load lower half of constant 0xa9e3e905 movk x13, #0xa9e3, lsl #16 // .Load upper half of constant 0xa9e3e905 add w4, w4, w26 // Add dest value add w4, w4, w13 // Add constant 0xa9e3e905 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xa9e3e905, s=5, M[13]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xa3f8 // .Load lower half of constant 0xfcefa3f8 movk x13, #0xfcef, lsl #16 // .Load upper half of constant 0xfcefa3f8 add w17, w17, w3 // Add dest value add w17, w17, w13 // Add constant 0xfcefa3f8 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xfcefa3f8, s=9, M[2]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x2d9 // .Load lower half of constant 0x676f02d9 movk x13, #0x676f, lsl #16 // .Load upper half of constant 0x676f02d9 add w8, w8, w23 // Add dest value add w8, w8, w13 // Add constant 0x676f02d9 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0x676f02d9, s=14, M[7]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x4c8a // .Load lower half of constant 0x8d2a4c8a movk x13, #0x8d2a, lsl #16 // .Load upper half of constant 0x8d2a4c8a add w9, w9, w11 // Add dest value add w9, w9, w13 // Add constant 0x8d2a4c8a add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #12 // Rotate left s=20 bits movz x10, #0x3942 // .Load lower half of constant 0xfffa3942 add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0x8d2a4c8a, s=20, M[12]) movk x10, #0xfffa, lsl #16 // .Load upper half of constant 0xfffa3942 add w4, w4, w22 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xfffa3942 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0xf681 // .Load lower half of constant 0x8771f681 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xfffa3942, s=4, M[5]) movk x10, #0x8771, lsl #16 // .Load upper half of constant 0x8771f681 add w17, w17, w5 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0x8771f681 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x6122 // .Load lower half of constant 0x6d9d6122 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0x8771f681, s=11, M[8]) movk x13, #0x6d9d, lsl #16 // .Load upper half of constant 0x6d9d6122 add w8, w8, w25 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0x6d9d6122 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits 
eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x380c // .Load lower half of constant 0xfde5380c add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0x6d9d6122, s=16, M[11]) movk x13, #0xfde5, lsl #16 // .Load upper half of constant 0xfde5380c add w9, w9, w12 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xfde5380c add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0xea44 // .Load lower half of constant 0xa4beea44 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xfde5380c, s=23, M[14]) movk x10, #0xa4be, lsl #16 // .Load upper half of constant 0xa4beea44 add w4, w4, w20 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xa4beea44 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0xcfa9 // .Load lower half of constant 0x4bdecfa9 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xa4beea44, s=4, M[1]) movk x10, #0x4bde, lsl #16 // .Load upper half of constant 0x4bdecfa9 add w17, w17, w14 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0x4bdecfa9 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x4b60 // .Load lower half of constant 0xf6bb4b60 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0x4bdecfa9, s=11, M[4]) movk x13, #0xf6bb, lsl #16 // .Load upper half of constant 0xf6bb4b60 add w8, w8, w23 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0xf6bb4b60 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0xbc70 // .Load lower half of constant 0xbebfbc70 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0xf6bb4b60, s=16, M[7]) movk x13, #0xbebf, lsl #16 // .Load upper half of constant 0xbebfbc70 add w9, w9, w16 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xbebfbc70 add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0x7ec6 // .Load lower half of constant 0x289b7ec6 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xbebfbc70, s=23, M[10]) movk x10, #0x289b, lsl #16 // .Load upper half of constant 0x289b7ec6 add w4, w4, w26 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0x289b7ec6 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0x27fa // .Load lower half of constant 0xeaa127fa add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0x289b7ec6, s=4, M[13]) movk x10, #0xeaa1, lsl #16 // .Load upper half of constant 0xeaa127fa add w17, w17, w15 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0xeaa127fa add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 
bits movz x13, #0x3085 // .Load lower half of constant 0xd4ef3085 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0xeaa127fa, s=11, M[0]) movk x13, #0xd4ef, lsl #16 // .Load upper half of constant 0xd4ef3085 add w8, w8, w21 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0xd4ef3085 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x1d05 // .Load lower half of constant 0x4881d05 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0xd4ef3085, s=16, M[3]) movk x13, #0x488, lsl #16 // .Load upper half of constant 0x4881d05 add w9, w9, w7 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0x4881d05 add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0xd039 // .Load lower half of constant 0xd9d4d039 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0x4881d05, s=23, M[6]) movk x10, #0xd9d4, lsl #16 // .Load upper half of constant 0xd9d4d039 add w4, w4, w24 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xd9d4d039 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0x99e5 // .Load lower half of constant 0xe6db99e5 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xd9d4d039, s=4, M[9]) movk x10, #0xe6db, lsl #16 // .Load upper half of constant 0xe6db99e5 add w17, w17, w11 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0xe6db99e5 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x7cf8 // .Load lower half of constant 0x1fa27cf8 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0xe6db99e5, s=11, M[12]) movk x13, #0x1fa2, lsl #16 // .Load upper half of constant 0x1fa27cf8 add w8, w8, w27 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0x1fa27cf8 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x5665 // .Load lower half of constant 0xc4ac5665 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0x1fa27cf8, s=16, M[15]) movk x13, #0xc4ac, lsl #16 // .Load upper half of constant 0xc4ac5665 add w9, w9, w3 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xc4ac5665 add w9, w9, w6 // Add aux function result ror w9, w9, #9 // Rotate left s=23 bits movz x6, #0x2244 // .Load lower half of constant 0xf4292244 movk x6, #0xf429, lsl #16 // .Load upper half of constant 0xf4292244 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xc4ac5665, s=23, M[2]) add w4, w4, w15 // Add dest value orn x13, x9, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w4, w6 // Add constant 0xf4292244 eor x6, x8, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w4, w6 // Add aux function result ror w4, w4, #26 // Rotate left s=6 bits movz x6, #0xff97 // .Load lower half of constant 0x432aff97 movk x6, #0x432a, lsl #16 // .Load upper half of constant 
0x432aff97 add w4, w9, w4 // Add X parameter round 4 A=II(A, B, C, D, 0xf4292244, s=6, M[0]) orn x10, x4, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w17, w23 // Add dest value eor x10, x9, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w17, w6 // Add constant 0x432aff97 add w6, w17, w10 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x17, #0x23a7 // .Load lower half of constant 0xab9423a7 movk x17, #0xab94, lsl #16 // .Load upper half of constant 0xab9423a7 add w6, w4, w6 // Add X parameter round 4 D=II(D, A, B, C, 0x432aff97, s=10, M[7]) add w8, w8, w12 // Add dest value orn x10, x6, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w17 // Add constant 0xab9423a7 eor x17, x4, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w17 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x17, #0xa039 // .Load lower half of constant 0xfc93a039 movk x17, #0xfc93, lsl #16 // .Load upper half of constant 0xfc93a039 add w8, w6, w8 // Add X parameter round 4 C=II(C, D, A, B, 0xab9423a7, s=15, M[14]) orn x13, x8, x4 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w22 // Add dest value eor x13, x6, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w17 // Add constant 0xfc93a039 add w17, w9, w13 // Add aux function result ror w17, w17, #11 // Rotate left s=21 bits movz x9, #0x59c3 // .Load lower half of constant 0x655b59c3 movk x9, #0x655b, lsl #16 // .Load upper half of constant 0x655b59c3 add w17, w8, w17 // Add X parameter round 4 B=II(B, C, D, A, 0xfc93a039, s=21, M[5]) add w4, w4, w11 // Add dest value orn x13, x17, x6 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w4, w9 // Add constant 0x655b59c3 eor x4, x8, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w4 // Add aux function result ror w9, w9, #26 // Rotate left s=6 bits movz x4, #0xcc92 // .Load lower half of constant 0x8f0ccc92 movk x4, #0x8f0c, lsl #16 // .Load upper half of constant 0x8f0ccc92 add w9, w17, w9 // Add X parameter round 4 A=II(A, B, C, D, 0x655b59c3, s=6, M[12]) orn x10, x9, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w6, w21 // Add dest value eor x10, x17, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w6, w4 // Add constant 0x8f0ccc92 add w6, w4, w10 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x4, #0xf47d // .Load lower half of constant 0xffeff47d movk x4, #0xffef, lsl #16 // .Load upper half of constant 0xffeff47d add w6, w9, w6 // Add X parameter round 4 D=II(D, A, B, C, 0x8f0ccc92, s=10, M[3]) add w8, w8, w16 // Add dest value orn x10, x6, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w4 // Add constant 0xffeff47d eor x4, x9, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w4 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x4, #0x5dd1 // .Load lower half of constant 0x85845dd1 movk x4, #0x8584, lsl #16 // .Load upper half of constant 0x85845dd1 add w8, w6, w8 // Add X parameter round 4 C=II(C, D, A, B, 0xffeff47d, s=15, M[10]) orn x10, x8, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w17, w20 // Add dest value eor x17, x6, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w15, w4 // Add constant 0x85845dd1 add w4, w15, w17 // Add aux function result ror w4, w4, #11 // Rotate left s=21 bits movz x15, #0x7e4f // .Load lower half of constant 0x6fa87e4f movk x15, #0x6fa8, lsl #16 // .Load upper half of constant 0x6fa87e4f 
add w17, w8, w4 // Add X parameter round 4 B=II(B, C, D, A, 0x85845dd1, s=21, M[1]) add w4, w9, w5 // Add dest value orn x9, x17, x6 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w4, w15 // Add constant 0x6fa87e4f eor x4, x8, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w15, w4 // Add aux function result ror w9, w9, #26 // Rotate left s=6 bits movz x15, #0xe6e0 // .Load lower half of constant 0xfe2ce6e0 movk x15, #0xfe2c, lsl #16 // .Load upper half of constant 0xfe2ce6e0 add w4, w17, w9 // Add X parameter round 4 A=II(A, B, C, D, 0x6fa87e4f, s=6, M[8]) orn x9, x4, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w6, w27 // Add dest value eor x9, x17, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w6, w15 // Add constant 0xfe2ce6e0 add w6, w15, w9 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x9, #0x4314 // .Load lower half of constant 0xa3014314 movk x9, #0xa301, lsl #16 // .Load upper half of constant 0xa3014314 add w15, w4, w6 // Add X parameter round 4 D=II(D, A, B, C, 0xfe2ce6e0, s=10, M[15]) add w6, w8, w7 // Add dest value orn x7, x15, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w6, w9 // Add constant 0xa3014314 eor x9, x4, x7 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w8, w9 // Add aux function result ror w6, w6, #17 // Rotate left s=15 bits movz x7, #0x11a1 // .Load lower half of constant 0x4e0811a1 movk x7, #0x4e08, lsl #16 // .Load upper half of constant 0x4e0811a1 add w8, w15, w6 // Add X parameter round 4 C=II(C, D, A, B, 0xa3014314, s=15, M[6]) orn x9, x8, x4 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w17, w26 // Add dest value eor x17, x15, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w6, w7 // Add constant 0x4e0811a1 add w7, w9, w17 // Add aux function result ror w7, w7, #11 // Rotate left s=21 bits movz x6, #0x7e82 // .Load lower half of constant 0xf7537e82 movk x6, #0xf753, lsl #16 // .Load upper half of constant 0xf7537e82 add w9, w8, w7 // Add X parameter round 4 B=II(B, C, D, A, 0x4e0811a1, s=21, M[13]) add w17, w4, w14 // Add dest value orn x7, x9, x15 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w14, w17, w6 // Add constant 0xf7537e82 eor x4, x8, x7 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w14, w4 // Add aux function result ror w17, w17, #26 // Rotate left s=6 bits movz x6, #0xf235 // .Load lower half of constant 0xbd3af235 movk x6, #0xbd3a, lsl #16 // .Load upper half of constant 0xbd3af235 add w7, w9, w17 // Add X parameter round 4 A=II(A, B, C, D, 0xf7537e82, s=6, M[4]) orn x14, x7, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w15, w25 // Add dest value eor x17, x9, x14 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w4, w6 // Add constant 0xbd3af235 add w16, w15, w17 // Add aux function result ror w16, w16, #22 // Rotate left s=10 bits movz x14, #0xd2bb // .Load lower half of constant 0x2ad7d2bb movk x14, #0x2ad7, lsl #16 // .Load upper half of constant 0x2ad7d2bb add w4, w7, w16 // Add X parameter round 4 D=II(D, A, B, C, 0xbd3af235, s=10, M[11]) add w6, w8, w3 // Add dest value orn x15, x4, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w6, w14 // Add constant 0x2ad7d2bb eor x16, x7, x15 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w17, w16 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x3, #0xd391 // .Load lower half of constant 0xeb86d391 movk x3, #0xeb86, lsl #16 // .Load upper half of constant 0xeb86d391 add w14, w4, w8 
// Add X parameter round 4 C=II(C, D, A, B, 0x2ad7d2bb, s=15, M[2])
	orn x6, x14, x7 // Begin aux function round 4 I(x,y,z)=((~z|x)^y)
	add w15, w9, w24 // Add dest value
	eor x17, x4, x6 // End aux function round 4 I(x,y,z)=((~z|x)^y)
	add w16, w15, w3 // Add constant 0xeb86d391
	add w8, w16, w17 // Add aux function result
	ror w8, w8, #11 // Rotate left s=21 bits
	ldp w6, w15, [x0] // Reload MD5 state->A and state->B
	ldp w5, w9, [x0, #8] // Reload MD5 state->C and state->D
	add w3, w14, w8 // Add X parameter round 4 B=II(B, C, D, A, 0xeb86d391, s=21, M[9])
	add w13, w4, w9 // Add result of MD5 rounds to state->D
	add w12, w14, w5 // Add result of MD5 rounds to state->C
	add w10, w7, w6 // Add result of MD5 rounds to state->A
	add w11, w3, w15 // Add result of MD5 rounds to state->B
	stp w12, w13, [x0, #8] // Store MD5 states C,D
	stp w10, w11, [x0] // Store MD5 states A,B
	add x1, x1, #64 // Increment data pointer
	subs w2, w2, #1 // Decrement block counter
	b.ne md5_blocks_loop
	ldp x21,x22,[sp,#16]
	ldp x23,x24,[sp,#32]
	ldp x25,x26,[sp,#48]
	ldp x27,x28,[sp,#64]
	ldp x19,x20,[sp],#80
	ret
#endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
49,366
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/sha512-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

#include <openssl/asm_base.h>

#if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__)
// Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the OpenSSL license (the "License"). You may not use
// this file except in compliance with the License. You can obtain a copy
// in the file LICENSE in the source distribution or at
// https://www.openssl.org/source/license.html
// ====================================================================
// Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
// project. The module is, however, dual licensed under OpenSSL and
// CRYPTOGAMS licenses depending on where you obtain it. For further
// details see http://www.openssl.org/~appro/cryptogams/.
//
// Permission to use under GPLv2 terms is granted.
// ====================================================================
//
// SHA256/512 for ARMv8.
//
// Performance in cycles per processed byte and improvement coefficient
// over code generated with "default" compiler:
//
//              SHA256-hw   SHA256(*)     SHA512
// Apple A7     1.97        10.5 (+33%)   6.73 (-1%(**))
// Cortex-A53   2.38        15.5 (+115%)  10.0 (+150%(***))
// Cortex-A57   2.31        11.6 (+86%)   7.51 (+260%(***))
// Denver       2.01        10.5 (+26%)   6.70 (+8%)
// X-Gene                   20.0 (+100%)  12.8 (+300%(***))
// Mongoose     2.36        13.0 (+50%)   8.36 (+33%)
// Kryo         1.92        17.4 (+30%)   11.2 (+8%)
//
// (*) Software SHA256 results are of lesser relevance, presented
// mostly for informational purposes.
// (**) The result is a trade-off: it's possible to improve it by
// 10% (or by 1 cycle per round), but at the cost of 20% loss
// on Cortex-A53 (or by 4 cycles per round).
// (***) Super-impressive coefficients over gcc-generated code are
// indication of some compiler "pathology", most notably code
// generated with -mgeneral-regs-only is significantly faster
// and the gap is only 40-90%.

#ifndef __KERNEL__
# include <openssl/arm_arch.h>
#endif

.text

.globl sha512_block_data_order_nohw
.hidden sha512_block_data_order_nohw
.type sha512_block_data_order_nohw,%function
.align 6
sha512_block_data_order_nohw:
	AARCH64_SIGN_LINK_REGISTER
	stp x29,x30,[sp,#-128]!
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#4*8 ldp x20,x21,[x0] // load context ldp x22,x23,[x0,#2*8] ldp x24,x25,[x0,#4*8] add x2,x1,x2,lsl#7 // end of input ldp x26,x27,[x0,#6*8] adrp x30,.LK512 add x30,x30,:lo12:.LK512 stp x0,x2,[x29,#96] .Loop: ldp x3,x4,[x1],#2*8 ldr x19,[x30],#8 // *K++ eor x28,x21,x22 // magic seed str x1,[x29,#112] #ifndef __AARCH64EB__ rev x3,x3 // 0 #endif ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x6,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x3 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x4,x4 // 1 #endif ldp x5,x6,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x7,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x4 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x5,x5 // 2 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x8,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 add x25,x25,x5 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x6,x6 // 3 #endif ldp x7,x8,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x9,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x6 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x7,x7 // 4 #endif add x24,x24,x17 // h+=Sigma0(a) ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x10,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x7 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x10,ror#18 // Sigma1(e) ror x10,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x10,x17,ror#34 // Sigma0(a) add x23,x23,x28 // h+=Maj(a,b,c) 
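// For reference, every unrolled round in this scalar path performs the same
// SHA-512 compression step; only the assignment of the working variables
// a..h to x20..x27 rotates from round to round. In the notation of the
// inline comments:
//   Sigma1(e) = (e ror 14) ^ (e ror 18) ^ (e ror 41)
//   Sigma0(a) = (a ror 28) ^ (a ror 34) ^ (a ror 39)
//   Ch(e,f,g) = (e & f) | (~e & g)          (the and/bic/orr sequence)
//   Maj(a,b,c)                              (built from the "a^b, b^c in next
//                                            round" masks kept in x19/x28)
//   T1 = h + Sigma1(e) + Ch(e,f,g) + K[i] + X[i];  d += T1
//   new a = T1 + Sigma0(a) + Maj(a,b,c), with the Sigma0(a) addition deferred
//   to the start of the following round (the commented-out "h+=Sigma0(a)").
// The rounds inside .Loop_16_xx additionally extend the message schedule with
// sigma0(X[i+1]) = (x ror 1) ^ (x ror 8) ^ (x lsr 7) and
// sigma1(X[i+14]) = (x ror 19) ^ (x ror 61) ^ (x lsr 6).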
ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x8,x8 // 5 #endif ldp x9,x10,[x1],#2*8 add x23,x23,x17 // h+=Sigma0(a) ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x11,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x8 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x11,ror#18 // Sigma1(e) ror x11,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x11,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x9,x9 // 6 #endif add x22,x22,x17 // h+=Sigma0(a) ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x12,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x9 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x12,ror#18 // Sigma1(e) ror x12,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x12,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x10,x10 // 7 #endif ldp x11,x12,[x1],#2*8 add x21,x21,x17 // h+=Sigma0(a) ror x16,x25,#14 add x20,x20,x28 // h+=K[i] eor x13,x25,x25,ror#23 and x17,x26,x25 bic x28,x27,x25 add x20,x20,x10 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x13,ror#18 // Sigma1(e) ror x13,x21,#28 add x20,x20,x17 // h+=Ch(e,f,g) eor x17,x21,x21,ror#5 add x20,x20,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x24,x24,x20 // d+=h eor x19,x19,x22 // Maj(a,b,c) eor x17,x13,x17,ror#34 // Sigma0(a) add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x20,x20,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x11,x11 // 8 #endif add x20,x20,x17 // h+=Sigma0(a) ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x14,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x11 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x14,ror#18 // Sigma1(e) ror x14,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x14,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x12,x12 // 9 #endif ldp x13,x14,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x15,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x12 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x15,ror#18 // Sigma1(e) ror x15,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x15,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x13,x13 // 10 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x0,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 add x25,x25,x13 
// h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x0,ror#18 // Sigma1(e) ror x0,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x0,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x14,x14 // 11 #endif ldp x15,x0,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x6,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x14 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x15,x15 // 12 #endif add x24,x24,x17 // h+=Sigma0(a) str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x7,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x15 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x0,x0 // 13 #endif ldp x1,x2,[x1] add x23,x23,x17 // h+=Sigma0(a) str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x8,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x0 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x1,x1 // 14 #endif ldr x6,[sp,#24] add x22,x22,x17 // h+=Sigma0(a) str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x9,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x1 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x2,x2 // 15 #endif ldr x7,[sp,#0] add x21,x21,x17 // h+=Sigma0(a) str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x10,x10,x21,ror#34 add 
x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 .Loop_16_xx: ldr x8,[sp,#8] str x11,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x10,x5,#1 and x17,x25,x24 ror x9,x2,#19 bic x19,x26,x24 ror x11,x20,#28 add x27,x27,x3 // h+=X[i] eor x16,x16,x24,ror#18 eor x10,x10,x5,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor x11,x11,x20,ror#34 add x27,x27,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x9,x9,x2,ror#61 eor x10,x10,x5,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x11,x20,ror#39 // Sigma0(a) eor x9,x9,x2,lsr#6 // sigma1(X[i+14]) add x4,x4,x13 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x4,x4,x10 add x27,x27,x17 // h+=Sigma0(a) add x4,x4,x9 ldr x9,[sp,#16] str x12,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x11,x6,#1 and x17,x24,x23 ror x10,x3,#19 bic x28,x25,x23 ror x12,x27,#28 add x26,x26,x4 // h+=X[i] eor x16,x16,x23,ror#18 eor x11,x11,x6,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x12,x12,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x10,x10,x3,ror#61 eor x11,x11,x6,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x12,x27,ror#39 // Sigma0(a) eor x10,x10,x3,lsr#6 // sigma1(X[i+14]) add x5,x5,x14 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x5,x5,x11 add x26,x26,x17 // h+=Sigma0(a) add x5,x5,x10 ldr x10,[sp,#24] str x13,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x12,x7,#1 and x17,x23,x22 ror x11,x4,#19 bic x19,x24,x22 ror x13,x26,#28 add x25,x25,x5 // h+=X[i] eor x16,x16,x22,ror#18 eor x12,x12,x7,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x13,x13,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x11,x11,x4,ror#61 eor x12,x12,x7,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x13,x26,ror#39 // Sigma0(a) eor x11,x11,x4,lsr#6 // sigma1(X[i+14]) add x6,x6,x15 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x6,x6,x12 add x25,x25,x17 // h+=Sigma0(a) add x6,x6,x11 ldr x11,[sp,#0] str x14,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x13,x8,#1 and x17,x22,x21 ror x12,x5,#19 bic x28,x23,x21 ror x14,x25,#28 add x24,x24,x6 // h+=X[i] eor x16,x16,x21,ror#18 eor x13,x13,x8,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x14,x14,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x12,x12,x5,ror#61 eor x13,x13,x8,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x14,x25,ror#39 // Sigma0(a) eor x12,x12,x5,lsr#6 // sigma1(X[i+14]) add x7,x7,x0 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x7,x7,x13 add 
x24,x24,x17 // h+=Sigma0(a) add x7,x7,x12 ldr x12,[sp,#8] str x15,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x14,x9,#1 and x17,x21,x20 ror x13,x6,#19 bic x19,x22,x20 ror x15,x24,#28 add x23,x23,x7 // h+=X[i] eor x16,x16,x20,ror#18 eor x14,x14,x9,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x15,x15,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x13,x13,x6,ror#61 eor x14,x14,x9,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x15,x24,ror#39 // Sigma0(a) eor x13,x13,x6,lsr#6 // sigma1(X[i+14]) add x8,x8,x1 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x8,x8,x14 add x23,x23,x17 // h+=Sigma0(a) add x8,x8,x13 ldr x13,[sp,#16] str x0,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x15,x10,#1 and x17,x20,x27 ror x14,x7,#19 bic x28,x21,x27 ror x0,x23,#28 add x22,x22,x8 // h+=X[i] eor x16,x16,x27,ror#18 eor x15,x15,x10,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x0,x0,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x14,x14,x7,ror#61 eor x15,x15,x10,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x0,x23,ror#39 // Sigma0(a) eor x14,x14,x7,lsr#6 // sigma1(X[i+14]) add x9,x9,x2 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x9,x9,x15 add x22,x22,x17 // h+=Sigma0(a) add x9,x9,x14 ldr x14,[sp,#24] str x1,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x0,x11,#1 and x17,x27,x26 ror x15,x8,#19 bic x19,x20,x26 ror x1,x22,#28 add x21,x21,x9 // h+=X[i] eor x16,x16,x26,ror#18 eor x0,x0,x11,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x1,x1,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x15,x15,x8,ror#61 eor x0,x0,x11,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x1,x22,ror#39 // Sigma0(a) eor x15,x15,x8,lsr#6 // sigma1(X[i+14]) add x10,x10,x3 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x10,x10,x0 add x21,x21,x17 // h+=Sigma0(a) add x10,x10,x15 ldr x15,[sp,#0] str x2,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x1,x12,#1 and x17,x26,x25 ror x0,x9,#19 bic x28,x27,x25 ror x2,x21,#28 add x20,x20,x10 // h+=X[i] eor x16,x16,x25,ror#18 eor x1,x1,x12,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x2,x2,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x0,x0,x9,ror#61 eor x1,x1,x12,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x2,x21,ror#39 // Sigma0(a) eor x0,x0,x9,lsr#6 // sigma1(X[i+14]) add x11,x11,x4 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x11,x11,x1 add x20,x20,x17 // h+=Sigma0(a) add x11,x11,x0 ldr x0,[sp,#8] str x3,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x2,x13,#1 and x17,x25,x24 ror x1,x10,#19 bic x19,x26,x24 ror x3,x20,#28 add x27,x27,x11 // h+=X[i] eor x16,x16,x24,ror#18 eor x2,x2,x13,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor x3,x3,x20,ror#34 add x27,x27,x17 
// h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x1,x1,x10,ror#61 eor x2,x2,x13,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x3,x20,ror#39 // Sigma0(a) eor x1,x1,x10,lsr#6 // sigma1(X[i+14]) add x12,x12,x5 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x12,x12,x2 add x27,x27,x17 // h+=Sigma0(a) add x12,x12,x1 ldr x1,[sp,#16] str x4,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x3,x14,#1 and x17,x24,x23 ror x2,x11,#19 bic x28,x25,x23 ror x4,x27,#28 add x26,x26,x12 // h+=X[i] eor x16,x16,x23,ror#18 eor x3,x3,x14,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x4,x4,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x2,x2,x11,ror#61 eor x3,x3,x14,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x4,x27,ror#39 // Sigma0(a) eor x2,x2,x11,lsr#6 // sigma1(X[i+14]) add x13,x13,x6 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x13,x13,x3 add x26,x26,x17 // h+=Sigma0(a) add x13,x13,x2 ldr x2,[sp,#24] str x5,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x4,x15,#1 and x17,x23,x22 ror x3,x12,#19 bic x19,x24,x22 ror x5,x26,#28 add x25,x25,x13 // h+=X[i] eor x16,x16,x22,ror#18 eor x4,x4,x15,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x5,x5,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x3,x3,x12,ror#61 eor x4,x4,x15,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x5,x26,ror#39 // Sigma0(a) eor x3,x3,x12,lsr#6 // sigma1(X[i+14]) add x14,x14,x7 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x14,x14,x4 add x25,x25,x17 // h+=Sigma0(a) add x14,x14,x3 ldr x3,[sp,#0] str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x5,x0,#1 and x17,x22,x21 ror x4,x13,#19 bic x28,x23,x21 ror x6,x25,#28 add x24,x24,x14 // h+=X[i] eor x16,x16,x21,ror#18 eor x5,x5,x0,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x6,x6,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x4,x4,x13,ror#61 eor x5,x5,x0,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x25,ror#39 // Sigma0(a) eor x4,x4,x13,lsr#6 // sigma1(X[i+14]) add x15,x15,x8 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x15,x15,x5 add x24,x24,x17 // h+=Sigma0(a) add x15,x15,x4 ldr x4,[sp,#8] str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x6,x1,#1 and x17,x21,x20 ror x5,x14,#19 bic x19,x22,x20 ror x7,x24,#28 add x23,x23,x15 // h+=X[i] eor x16,x16,x20,ror#18 eor x6,x6,x1,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x7,x7,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x5,x5,x14,ror#61 eor x6,x6,x1,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x24,ror#39 // Sigma0(a) eor x5,x5,x14,lsr#6 // sigma1(X[i+14]) add x0,x0,x9 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x0,x0,x6 add x23,x23,x17 // h+=Sigma0(a) add x0,x0,x5 ldr 
x5,[sp,#16] str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x7,x2,#1 and x17,x20,x27 ror x6,x15,#19 bic x28,x21,x27 ror x8,x23,#28 add x22,x22,x0 // h+=X[i] eor x16,x16,x27,ror#18 eor x7,x7,x2,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x8,x8,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x6,x6,x15,ror#61 eor x7,x7,x2,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x23,ror#39 // Sigma0(a) eor x6,x6,x15,lsr#6 // sigma1(X[i+14]) add x1,x1,x10 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x1,x1,x7 add x22,x22,x17 // h+=Sigma0(a) add x1,x1,x6 ldr x6,[sp,#24] str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x8,x3,#1 and x17,x27,x26 ror x7,x0,#19 bic x19,x20,x26 ror x9,x22,#28 add x21,x21,x1 // h+=X[i] eor x16,x16,x26,ror#18 eor x8,x8,x3,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x9,x9,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x7,x7,x0,ror#61 eor x8,x8,x3,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x22,ror#39 // Sigma0(a) eor x7,x7,x0,lsr#6 // sigma1(X[i+14]) add x2,x2,x11 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x2,x2,x8 add x21,x21,x17 // h+=Sigma0(a) add x2,x2,x7 ldr x7,[sp,#0] str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x10,x10,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 cbnz x19,.Loop_16_xx ldp x0,x2,[x29,#96] ldr x1,[x29,#112] sub x30,x30,#648 // rewind ldp x3,x4,[x0] ldp x5,x6,[x0,#2*8] add x1,x1,#14*8 // advance input pointer ldp x7,x8,[x0,#4*8] add x20,x20,x3 ldp x9,x10,[x0,#6*8] add x21,x21,x4 add x22,x22,x5 add x23,x23,x6 stp x20,x21,[x0] add x24,x24,x7 add x25,x25,x8 stp x22,x23,[x0,#2*8] add x26,x26,x9 add x27,x27,x10 cmp x1,x2 stp x24,x25,[x0,#4*8] stp x26,x27,[x0,#6*8] b.ne .Loop ldp x19,x20,[x29,#16] add sp,sp,#4*8 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .size sha512_block_data_order_nohw,.-sha512_block_data_order_nohw .section .rodata .align 6 .type .LK512,%object .LK512: .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 
0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0 // terminator .size .LK512,.-.LK512 .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 .text #ifndef __KERNEL__ .globl sha512_block_data_order_hw .hidden sha512_block_data_order_hw .type sha512_block_data_order_hw,%function .align 6 sha512_block_data_order_hw: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#8] // kFlag_sha512_hw #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ld1 {v16.16b,v17.16b,v18.16b,v19.16b},[x1],#64 // load input ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 ld1 {v0.2d,v1.2d,v2.2d,v3.2d},[x0] // load context adrp x3,.LK512 add x3,x3,:lo12:.LK512 rev64 v16.16b,v16.16b rev64 v17.16b,v17.16b rev64 v18.16b,v18.16b rev64 v19.16b,v19.16b rev64 v20.16b,v20.16b rev64 v21.16b,v21.16b rev64 v22.16b,v22.16b rev64 v23.16b,v23.16b b .Loop_hw .align 4 .Loop_hw: ld1 {v24.2d},[x3],#16 subs x2,x2,#1 sub x4,x1,#128 orr v26.16b,v0.16b,v0.16b // offload orr v27.16b,v1.16b,v1.16b orr v28.16b,v2.16b,v2.16b orr v29.16b,v3.16b,v3.16b csel x1,x1,x4,ne // conditional rewind add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 
0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add 
v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 
v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .inst 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .inst 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .inst 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .inst 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .inst 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v23.2d ld1 
{v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .inst 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .inst 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v16.2d ld1 {v16.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b rev64 v16.16b,v16.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v17.2d ld1 {v17.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b rev64 v17.16b,v17.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v18.2d ld1 {v18.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b rev64 v18.16b,v18.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v19.2d ld1 {v19.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .inst 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b rev64 v19.16b,v19.16b add v4.2d,v1.2d,v3.2d // "D + T1" .inst 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v20.2d ld1 {v20.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .inst 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b rev64 v20.16b,v20.16b add v1.2d,v0.2d,v2.2d // "D + T1" .inst 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v21.2d ld1 {v21.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .inst 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b rev64 v21.16b,v21.16b add v0.2d,v3.2d,v4.2d // "D + T1" .inst 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v22.2d ld1 {v22.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .inst 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b rev64 v22.16b,v22.16b add v3.2d,v2.2d,v1.2d // "D + T1" .inst 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b sub x3,x3,#80*8 // rewind add v25.2d,v25.2d,v23.2d ld1 {v23.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .inst 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b rev64 v23.16b,v23.16b add v2.2d,v4.2d,v0.2d // "D + T1" .inst 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v0.2d,v0.2d,v26.2d // accumulate add v1.2d,v1.2d,v27.2d add v2.2d,v2.2d,v28.2d add v3.2d,v3.2d,v29.2d cbnz x2,.Loop_hw st1 
{v0.2d,v1.2d,v2.2d,v3.2d},[x0] // store context ldr x29,[sp],#16 ret .size sha512_block_data_order_hw,.-sha512_block_data_order_hw #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
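For reference when auditing the generated SHA-512 code above: the ror/eor chains implement the FIPS 180-4 Sigma0/Sigma1/sigma0/sigma1 functions named in the inline comments, and the sha512h/sha512su0/sha512su1 encodings in the hardware path compute the same round and message-schedule update. A minimal Python sketch of that round follows; it is illustrative only, is not part of the generated file, and the helper names are mine.

# Illustrative sketch only; not part of the generated assembly.
MASK64 = (1 << 64) - 1

def rotr(x, n):
    # 64-bit rotate right
    return ((x >> n) | (x << (64 - n))) & MASK64

def big_sigma0(a):                      # "Sigma0(a)": ror 28, 34, 39
    return rotr(a, 28) ^ rotr(a, 34) ^ rotr(a, 39)

def big_sigma1(e):                      # "Sigma1(e)": ror 14, 18, 41
    return rotr(e, 14) ^ rotr(e, 18) ^ rotr(e, 41)

def small_sigma0(x):                    # "sigma0(X[i+1])": ror 1, 8, shift 7
    return rotr(x, 1) ^ rotr(x, 8) ^ (x >> 7)

def small_sigma1(x):                    # "sigma1(X[i+14])": ror 19, 61, shift 6
    return rotr(x, 19) ^ rotr(x, 61) ^ (x >> 6)

def sha512_round(state, w_i, k_i):
    # One round: h += Sigma1(e) + Ch(e,f,g) + K[i] + X[i]; d += h; h += Sigma0(a) + Maj(a,b,c)
    a, b, c, d, e, f, g, h = state
    t1 = (h + big_sigma1(e) + ((e & f) ^ (~e & g)) + k_i + w_i) & MASK64
    t2 = (big_sigma0(a) + ((a & b) ^ (a & c) ^ (b & c))) & MASK64
    return ((t1 + t2) & MASK64, a, b, c, (d + t1) & MASK64, e, f, g)

def schedule_update(w, i):
    # X[i] += sigma1(X[i+14]) + X[i+9] + sigma0(X[i+1]), indices taken mod 16
    return (w[i % 16] + small_sigma1(w[(i + 14) % 16]) +
            w[(i + 9) % 16] + small_sigma0(w[(i + 1) % 16])) & MASK64

The .Loop_16_xx rounds keep X[] in a 16-entry ring on the stack, which is why the schedule indices above are taken mod 16.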
marvin-hansen/iggy-streaming-system
17,485
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/ghashv8-armx.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__>=7 .text .arch armv8-a+crypto .globl gcm_init_v8 .hidden gcm_init_v8 .type gcm_init_v8,%function .align 4 gcm_init_v8: AARCH64_VALID_CALL_TARGET ld1 {v17.2d},[x1] //load input H movi v19.16b,#0xe1 shl v19.2d,v19.2d,#57 //0xc2.0 ext v3.16b,v17.16b,v17.16b,#8 ushr v18.2d,v19.2d,#63 dup v17.4s,v17.s[1] ext v16.16b,v18.16b,v19.16b,#8 //t0=0xc2....01 ushr v18.2d,v3.2d,#63 sshr v17.4s,v17.4s,#31 //broadcast carry bit and v18.16b,v18.16b,v16.16b shl v3.2d,v3.2d,#1 ext v18.16b,v18.16b,v18.16b,#8 and v16.16b,v16.16b,v17.16b orr v3.16b,v3.16b,v18.16b //H<<<=1 eor v20.16b,v3.16b,v16.16b //twisted H ext v20.16b, v20.16b, v20.16b, #8 st1 {v20.2d},[x0],#16 //store Htable[0] //calculate H^2 ext v16.16b,v20.16b,v20.16b,#8 //Karatsuba pre-processing pmull2 v0.1q,v20.2d,v20.2d eor v16.16b,v16.16b,v20.16b pmull v2.1q,v20.1d,v20.1d pmull v1.1q,v16.1d,v16.1d ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v17.16b,v0.16b,v18.16b ext v22.16b,v17.16b,v17.16b,#8 //Karatsuba pre-processing eor v17.16b,v17.16b,v22.16b ext v21.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v21.2d},[x0],#16 //store Htable[1..2] st1 {v22.2d},[x0],#16 //store Htable[1..2] //calculate H^3 and H^4 pmull2 v0.1q,v20.2d, v22.2d pmull2 v5.1q,v22.2d,v22.2d pmull v2.1q,v20.1d, v22.1d pmull v7.1q,v22.1d,v22.1d pmull v1.1q,v16.1d,v17.1d pmull v6.1q,v17.1d,v17.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase ext v4.16b,v5.16b,v5.16b,#8 pmull v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b, v0.16b,v18.16b //H^3 eor v17.16b, v5.16b,v4.16b //H^4 ext v23.16b,v16.16b,v16.16b,#8 //Karatsuba pre-processing ext v25.16b,v17.16b,v17.16b,#8 ext v18.16b,v22.16b,v22.16b,#8 eor v16.16b,v16.16b,v23.16b eor v17.16b,v17.16b,v25.16b eor v18.16b,v18.16b,v22.16b ext v24.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v23.2d,v24.2d,v25.2d},[x0],#48 //store Htable[3..5] //calculate H^5 and H^6 pmull2 v0.1q,v22.2d, v23.2d pmull2 v5.1q,v23.2d,v23.2d pmull v2.1q,v22.1d, v23.1d pmull v7.1q,v23.1d,v23.1d pmull v1.1q,v16.1d,v18.1d pmull v6.1q,v16.1d,v16.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase 
ext v4.16b,v5.16b,v5.16b,#8 pmull v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b,v0.16b,v18.16b //H^5 eor v17.16b,v5.16b,v4.16b //H^6 ext v26.16b, v16.16b, v16.16b,#8 //Karatsuba pre-processing ext v28.16b, v17.16b, v17.16b,#8 ext v18.16b,v22.16b,v22.16b,#8 eor v16.16b,v16.16b,v26.16b eor v17.16b,v17.16b,v28.16b eor v18.16b,v18.16b,v22.16b ext v27.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v26.2d,v27.2d,v28.2d},[x0],#48 //store Htable[6..8] //calculate H^7 and H^8 pmull2 v0.1q,v22.2d,v26.2d pmull2 v5.1q,v22.2d,v28.2d pmull v2.1q,v22.1d,v26.1d pmull v7.1q,v22.1d,v28.1d pmull v1.1q,v16.1d,v18.1d pmull v6.1q,v17.1d,v18.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase ext v4.16b,v5.16b,v5.16b,#8 pmull v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b,v0.16b,v18.16b //H^7 eor v17.16b,v5.16b,v4.16b //H^8 ext v29.16b,v16.16b,v16.16b,#8 //Karatsuba pre-processing ext v31.16b,v17.16b,v17.16b,#8 eor v16.16b,v16.16b,v29.16b eor v17.16b,v17.16b,v31.16b ext v30.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v29.2d,v30.2d,v31.2d},[x0] //store Htable[9..11] ret .size gcm_init_v8,.-gcm_init_v8 .globl gcm_gmult_v8 .hidden gcm_gmult_v8 .type gcm_gmult_v8,%function .align 4 gcm_gmult_v8: AARCH64_VALID_CALL_TARGET ld1 {v17.2d},[x0] //load Xi movi v19.16b,#0xe1 ld1 {v20.2d,v21.2d},[x1] //load twisted H, ... ext v20.16b,v20.16b,v20.16b,#8 shl v19.2d,v19.2d,#57 #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ext v3.16b,v17.16b,v17.16b,#8 pmull v0.1q,v20.1d,v3.1d //H.lo·Xi.lo eor v17.16b,v17.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v20.2d,v3.2d //H.hi·Xi.hi pmull v1.1q,v21.1d,v17.1d //(H.lo+H.hi)·(Xi.lo+Xi.hi) ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b #endif ext v0.16b,v0.16b,v0.16b,#8 st1 {v0.2d},[x0] //write out Xi ret .size gcm_gmult_v8,.-gcm_gmult_v8 .globl gcm_ghash_v8 .hidden gcm_ghash_v8 .type gcm_ghash_v8,%function .align 4 gcm_ghash_v8: AARCH64_VALID_CALL_TARGET cmp x3,#64 b.hs .Lgcm_ghash_v8_4x ld1 {v0.2d},[x0] //load [rotated] Xi //"[rotated]" means that //loaded value would have //to be rotated in order to //make it appear as in //algorithm specification subs x3,x3,#32 //see if x3 is 32 or larger mov x12,#16 //x12 is used as post- //increment for input pointer; //as loop is modulo-scheduled //x12 is zeroed just in time //to preclude overstepping //inp[len], which means that //last block[s] are actually //loaded twice, but last //copy is not processed ld1 {v20.2d,v21.2d},[x1],#32 //load twisted H, ..., H^2 ext v20.16b,v20.16b,v20.16b,#8 movi v19.16b,#0xe1 ld1 {v22.2d},[x1] ext v22.16b,v22.16b,v22.16b,#8 csel x12,xzr,x12,eq //is it time to zero x12? 
ext v0.16b,v0.16b,v0.16b,#8 //rotate Xi ld1 {v16.2d},[x2],#16 //load [rotated] I[0] shl v19.2d,v19.2d,#57 //compose 0xc2.0 constant #ifndef __AARCH64EB__ rev64 v16.16b,v16.16b rev64 v0.16b,v0.16b #endif ext v3.16b,v16.16b,v16.16b,#8 //rotate I[0] b.lo .Lodd_tail_v8 //x3 was less than 32 ld1 {v17.2d},[x2],x12 //load [rotated] I[1] #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ext v7.16b,v17.16b,v17.16b,#8 eor v3.16b,v3.16b,v0.16b //I[i]^=Xi pmull v4.1q,v20.1d,v7.1d //H·Ii+1 eor v17.16b,v17.16b,v7.16b //Karatsuba pre-processing pmull2 v6.1q,v20.2d,v7.2d b .Loop_mod2x_v8 .align 4 .Loop_mod2x_v8: ext v18.16b,v3.16b,v3.16b,#8 subs x3,x3,#32 //is there more data? pmull v0.1q,v22.1d,v3.1d //H^2.lo·Xi.lo csel x12,xzr,x12,lo //is it time to zero x12? pmull v5.1q,v21.1d,v17.1d eor v18.16b,v18.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v22.2d,v3.2d //H^2.hi·Xi.hi eor v0.16b,v0.16b,v4.16b //accumulate pmull2 v1.1q,v21.2d,v18.2d //(H^2.lo+H^2.hi)·(Xi.lo+Xi.hi) ld1 {v16.2d},[x2],x12 //load [rotated] I[i+2] eor v2.16b,v2.16b,v6.16b csel x12,xzr,x12,eq //is it time to zero x12? eor v1.16b,v1.16b,v5.16b ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v17.2d},[x2],x12 //load [rotated] I[i+3] #ifndef __AARCH64EB__ rev64 v16.16b,v16.16b #endif eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v7.16b,v17.16b,v17.16b,#8 ext v3.16b,v16.16b,v16.16b,#8 eor v0.16b,v1.16b,v18.16b pmull v4.1q,v20.1d,v7.1d //H·Ii+1 eor v3.16b,v3.16b,v2.16b //accumulate v3.16b early ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v3.16b,v3.16b,v18.16b eor v17.16b,v17.16b,v7.16b //Karatsuba pre-processing eor v3.16b,v3.16b,v0.16b pmull2 v6.1q,v20.2d,v7.2d b.hs .Loop_mod2x_v8 //there was at least 32 more bytes eor v2.16b,v2.16b,v18.16b ext v3.16b,v16.16b,v16.16b,#8 //re-construct v3.16b adds x3,x3,#32 //re-construct x3 eor v0.16b,v0.16b,v2.16b //re-construct v0.16b b.eq .Ldone_v8 //is x3 zero? 
.Lodd_tail_v8: ext v18.16b,v0.16b,v0.16b,#8 eor v3.16b,v3.16b,v0.16b //inp^=Xi eor v17.16b,v16.16b,v18.16b //v17.16b is rotated inp^Xi pmull v0.1q,v20.1d,v3.1d //H.lo·Xi.lo eor v17.16b,v17.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v20.2d,v3.2d //H.hi·Xi.hi pmull v1.1q,v21.1d,v17.1d //(H.lo+H.hi)·(Xi.lo+Xi.hi) ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b .Ldone_v8: #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b #endif ext v0.16b,v0.16b,v0.16b,#8 st1 {v0.2d},[x0] //write out Xi ret .size gcm_ghash_v8,.-gcm_ghash_v8 .type gcm_ghash_v8_4x,%function .align 4 gcm_ghash_v8_4x: .Lgcm_ghash_v8_4x: ld1 {v0.2d},[x0] //load [rotated] Xi ld1 {v20.2d,v21.2d,v22.2d},[x1],#48 //load twisted H, ..., H^2 ext v20.16b,v20.16b,v20.16b,#8 ext v22.16b,v22.16b,v22.16b,#8 movi v19.16b,#0xe1 ld1 {v26.2d,v27.2d,v28.2d},[x1] //load twisted H^3, ..., H^4 ext v26.16b,v26.16b,v26.16b,#8 ext v28.16b,v28.16b,v28.16b,#8 shl v19.2d,v19.2d,#57 //compose 0xc2.0 constant ld1 {v4.2d,v5.2d,v6.2d,v7.2d},[x2],#64 #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v7.16b,v7.16b rev64 v4.16b,v4.16b #endif ext v25.16b,v7.16b,v7.16b,#8 ext v24.16b,v6.16b,v6.16b,#8 ext v23.16b,v5.16b,v5.16b,#8 pmull v29.1q,v20.1d,v25.1d //H·Ii+3 eor v7.16b,v7.16b,v25.16b pmull2 v31.1q,v20.2d,v25.2d pmull v30.1q,v21.1d,v7.1d pmull v16.1q,v22.1d,v24.1d //H^2·Ii+2 eor v6.16b,v6.16b,v24.16b pmull2 v24.1q,v22.2d,v24.2d pmull2 v6.1q,v21.2d,v6.2d eor v29.16b,v29.16b,v16.16b eor v31.16b,v31.16b,v24.16b eor v30.16b,v30.16b,v6.16b pmull v7.1q,v26.1d,v23.1d //H^3·Ii+1 eor v5.16b,v5.16b,v23.16b pmull2 v23.1q,v26.2d,v23.2d pmull v5.1q,v27.1d,v5.1d eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b eor v30.16b,v30.16b,v5.16b subs x3,x3,#128 b.lo .Ltail4x b .Loop4x .align 4 .Loop4x: eor v16.16b,v4.16b,v0.16b ld1 {v4.2d,v5.2d,v6.2d,v7.2d},[x2],#64 ext v3.16b,v16.16b,v16.16b,#8 #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v7.16b,v7.16b rev64 v4.16b,v4.16b #endif pmull v0.1q,v28.1d,v3.1d //H^4·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v28.2d,v3.2d ext v25.16b,v7.16b,v7.16b,#8 pmull2 v1.1q,v27.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b ext v24.16b,v6.16b,v6.16b,#8 eor v1.16b,v1.16b,v30.16b ext v23.16b,v5.16b,v5.16b,#8 ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b pmull v29.1q,v20.1d,v25.1d //H·Ii+3 eor v7.16b,v7.16b,v25.16b eor v1.16b,v1.16b,v17.16b pmull2 v31.1q,v20.2d,v25.2d eor v1.16b,v1.16b,v18.16b pmull v30.1q,v21.1d,v7.1d pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] pmull v16.1q,v22.1d,v24.1d //H^2·Ii+2 eor v6.16b,v6.16b,v24.16b pmull2 v24.1q,v22.2d,v24.2d eor v0.16b,v1.16b,v18.16b pmull2 v6.1q,v21.2d,v6.2d eor v29.16b,v29.16b,v16.16b eor v31.16b,v31.16b,v24.16b eor v30.16b,v30.16b,v6.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d pmull v7.1q,v26.1d,v23.1d //H^3·Ii+1 eor v5.16b,v5.16b,v23.16b eor v18.16b,v18.16b,v2.16b pmull2 v23.1q,v26.2d,v23.2d pmull v5.1q,v27.1d,v5.1d eor v0.16b,v0.16b,v18.16b eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b ext v0.16b,v0.16b,v0.16b,#8 eor v30.16b,v30.16b,v5.16b subs x3,x3,#64 b.hs 
.Loop4x .Ltail4x: eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 pmull v0.1q,v28.1d,v3.1d //H^4·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v28.2d,v3.2d pmull2 v1.1q,v27.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b adds x3,x3,#64 b.eq .Ldone4x cmp x3,#32 b.lo .Lone b.eq .Ltwo .Lthree: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d,v5.2d,v6.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v24.16b,v6.16b,v6.16b,#8 ext v23.16b,v5.16b,v5.16b,#8 eor v0.16b,v1.16b,v18.16b pmull v29.1q,v20.1d,v24.1d //H·Ii+2 eor v6.16b,v6.16b,v24.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b pmull2 v31.1q,v20.2d,v24.2d pmull v30.1q,v21.1d,v6.1d eor v0.16b,v0.16b,v18.16b pmull v7.1q,v22.1d,v23.1d //H^2·Ii+1 eor v5.16b,v5.16b,v23.16b ext v0.16b,v0.16b,v0.16b,#8 pmull2 v23.1q,v22.2d,v23.2d eor v16.16b,v4.16b,v0.16b pmull2 v5.1q,v21.2d,v5.2d ext v3.16b,v16.16b,v16.16b,#8 eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b eor v30.16b,v30.16b,v5.16b pmull v0.1q,v26.1d,v3.1d //H^3·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v26.2d,v3.2d pmull v1.1q,v27.1d,v16.1d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b b .Ldone4x .align 4 .Ltwo: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d,v5.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v23.16b,v5.16b,v5.16b,#8 eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 pmull v29.1q,v20.1d,v23.1d //H·Ii+1 eor v5.16b,v5.16b,v23.16b eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 pmull2 v31.1q,v20.2d,v23.2d pmull v30.1q,v21.1d,v5.1d pmull v0.1q,v22.1d,v3.1d //H^2·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v22.2d,v3.2d pmull2 v1.1q,v21.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b b .Ldone4x .align 4 .Lone: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 pmull v0.1q,v20.1d,v3.1d eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v20.2d,v3.2d pmull v1.1q,v21.1d,v16.1d .Ldone4x: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b 
#endif st1 {v0.2d},[x0] //write out Xi ret .size gcm_ghash_v8_4x,.-gcm_ghash_v8_4x .byte 71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
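For reference: gcm_init_v8 precomputes powers of the hash key H in the "twisted" form noted in its comments, and gcm_gmult_v8/gcm_ghash_v8 multiply in GF(2^128) with pmull plus Karatsuba splitting and the shifted 0xe1/0xc2 reduction constant. The textbook bit-serial multiplication from NIST SP 800-38D that those routines accelerate is sketched below in Python; it is a cross-checking aid only, not the vectorized method, and the function names are mine.

# Illustrative sketch only; not part of the generated assembly.
R = 0xE1 << 120   # x^128 + x^7 + x^2 + x + 1 in GCM's bit order (leftmost bit = x^0 coefficient)

def gf128_mul(x, y):
    # Textbook bit-serial multiply in GF(2^128), NIST SP 800-38D Algorithm 1.
    # Blocks are taken as big-endian 128-bit integers.
    z, v = 0, y
    for i in range(127, -1, -1):        # bits of x, leftmost (GCM bit 0) first
        if (x >> i) & 1:
            z ^= v
        v = (v >> 1) ^ R if v & 1 else v >> 1
    return z

def ghash(h, blocks):
    # Xi = (Xi ^ block) * H per 16-byte block, the chain gcm_ghash_v8 maintains.
    xi = 0
    for block in blocks:
        xi = gf128_mul(xi ^ block, h)
    return xi

The gcm_ghash_v8_4x path folds four blocks per iteration using the stored H through H^4, but the per-block result is the same Xi = (Xi ^ block)·H chain shown here.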
marvin-hansen/iggy-streaming-system
43,785
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/vpaes-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .section .rodata .type _vpaes_consts,%object .align 7 // totally strategic alignment _vpaes_consts: .Lk_mc_forward: // mc_forward .quad 0x0407060500030201, 0x0C0F0E0D080B0A09 .quad 0x080B0A0904070605, 0x000302010C0F0E0D .quad 0x0C0F0E0D080B0A09, 0x0407060500030201 .quad 0x000302010C0F0E0D, 0x080B0A0904070605 .Lk_mc_backward: // mc_backward .quad 0x0605040702010003, 0x0E0D0C0F0A09080B .quad 0x020100030E0D0C0F, 0x0A09080B06050407 .quad 0x0E0D0C0F0A09080B, 0x0605040702010003 .quad 0x0A09080B06050407, 0x020100030E0D0C0F .Lk_sr: // sr .quad 0x0706050403020100, 0x0F0E0D0C0B0A0908 .quad 0x030E09040F0A0500, 0x0B06010C07020D08 .quad 0x0F060D040B020900, 0x070E050C030A0108 .quad 0x0B0E0104070A0D00, 0x0306090C0F020508 // // "Hot" constants // .Lk_inv: // inv, inva .quad 0x0E05060F0D080180, 0x040703090A0B0C02 .quad 0x01040A060F0B0780, 0x030D0E0C02050809 .Lk_ipt: // input transform (lo, hi) .quad 0xC2B2E8985A2A7000, 0xCABAE09052227808 .quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81 .Lk_sbo: // sbou, sbot .quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878 .quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA .Lk_sb1: // sb1u, sb1t .quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF .quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544 .Lk_sb2: // sb2u, sb2t .quad 0x69EB88400AE12900, 0xC2A163C8AB82234A .quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD // // Decryption stuff // .Lk_dipt: // decryption input transform .quad 0x0F505B040B545F00, 0x154A411E114E451A .quad 0x86E383E660056500, 0x12771772F491F194 .Lk_dsbo: // decryption sbox final output .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C .Lk_dsb9: // decryption sbox output *9*u, *9*t .quad 0x851C03539A86D600, 0xCAD51F504F994CC9 .quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565 .Lk_dsbd: // decryption sbox output *D*u, *D*t .quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439 .quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3 .Lk_dsbb: // decryption sbox output *B*u, *B*t .quad 0xD022649296B44200, 0x602646F6B0F2D404 .quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B .Lk_dsbe: // decryption sbox output *E*u, *E*t .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 // // Key schedule constants // .Lk_dksd: // decryption key schedule: invskew x*D .quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9 .quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E .Lk_dksb: // decryption key schedule: invskew x*B .quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99 .quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8 .Lk_dkse: // decryption key schedule: invskew x*E + 0x63 .quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086 .quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487 .Lk_dks9: // decryption key schedule: invskew x*9 .quad 0xB6116FC87ED9A700, 0x4AED933482255BFC .quad 0x4576516227143300, 0x8BB89FACE9DAFDCE .Lk_rcon: // rcon .quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81 .Lk_opt: // output transform .quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808 .quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0 .Lk_deskew: // deskew tables: inverts the sbox's "skew" .quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A .quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77 .byte 
86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,65,82,77,118,56,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 .align 2 .size _vpaes_consts,.-_vpaes_consts .align 6 .text ## ## _aes_preheat ## ## Fills register %r10 -> .aes_consts (so you can -fPIC) ## and %xmm9-%xmm15 as specified below. ## .type _vpaes_encrypt_preheat,%function .align 4 _vpaes_encrypt_preheat: adrp x10, .Lk_inv add x10, x10, :lo12:.Lk_inv movi v17.16b, #0x0f ld1 {v18.2d,v19.2d}, [x10],#32 // .Lk_inv ld1 {v20.2d,v21.2d,v22.2d,v23.2d}, [x10],#64 // .Lk_ipt, .Lk_sbo ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x10] // .Lk_sb1, .Lk_sb2 ret .size _vpaes_encrypt_preheat,.-_vpaes_encrypt_preheat ## ## _aes_encrypt_core ## ## AES-encrypt %xmm0. ## ## Inputs: ## %xmm0 = input ## %xmm9-%xmm15 as in _vpaes_preheat ## (%rdx) = scheduled keys ## ## Output in %xmm0 ## Clobbers %xmm1-%xmm5, %r9, %r10, %r11, %rax ## Preserves %xmm6 - %xmm8 so you get some local vectors ## ## .type _vpaes_encrypt_core,%function .align 4 _vpaes_encrypt_core: mov x9, x2 ldr w8, [x2,#240] // pull rounds adrp x11, .Lk_mc_forward+16 add x11, x11, :lo12:.Lk_mc_forward+16 // vmovdqa .Lk_ipt(%rip), %xmm2 # iptlo ld1 {v16.2d}, [x9], #16 // vmovdqu (%r9), %xmm5 # round0 key and v1.16b, v7.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v7.16b, #4 // vpsrlb $4, %xmm0, %xmm0 tbl v1.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm1 // vmovdqa .Lk_ipt+16(%rip), %xmm3 # ipthi tbl v2.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm3, %xmm2 eor v0.16b, v1.16b, v16.16b // vpxor %xmm5, %xmm1, %xmm0 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 b .Lenc_entry .align 4 .Lenc_loop: // middle of middle round add x10, x11, #0x40 tbl v4.16b, {v25.16b}, v2.16b // vpshufb %xmm2, %xmm13, %xmm4 # 4 = sb1u ld1 {v1.2d}, [x11], #16 // vmovdqa -0x40(%r11,%r10), %xmm1 # .Lk_mc_forward[] tbl v0.16b, {v24.16b}, v3.16b // vpshufb %xmm3, %xmm12, %xmm0 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k tbl v5.16b, {v27.16b}, v2.16b // vpshufb %xmm2, %xmm15, %xmm5 # 4 = sb2u eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A tbl v2.16b, {v26.16b}, v3.16b // vpshufb %xmm3, %xmm14, %xmm2 # 2 = sb2t ld1 {v4.2d}, [x10] // vmovdqa (%r11,%r10), %xmm4 # .Lk_mc_backward[] tbl v3.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm3 # 0 = B eor v2.16b, v2.16b, v5.16b // vpxor %xmm5, %xmm2, %xmm2 # 2 = 2A tbl v0.16b, {v0.16b}, v4.16b // vpshufb %xmm4, %xmm0, %xmm0 # 3 = D eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 0 = 2A+B tbl v4.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm4 # 0 = 2B+C eor v0.16b, v0.16b, v3.16b // vpxor %xmm3, %xmm0, %xmm0 # 3 = 2A+B+D and x11, x11, #~(1<<6) // and $0x30, %r11 # ... 
mod 4 eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = 2A+3B+C+D sub w8, w8, #1 // nr-- .Lenc_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm0, %xmm9, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i tbl v5.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm5 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v3.16b, v3.16b, v5.16b // vpxor %xmm5, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v4.16b, v4.16b, v5.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v3.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm5 cbnz w8, .Lenc_loop // middle of last round add x10, x11, #0x80 // vmovdqa -0x60(%r10), %xmm4 # 3 : sbou .Lk_sbo // vmovdqa -0x50(%r10), %xmm0 # 0 : sbot .Lk_sbo+16 tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou ld1 {v1.2d}, [x10] // vmovdqa 0x40(%r11,%r10), %xmm1 # .Lk_sr[] tbl v0.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm0, %xmm0 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A tbl v0.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm0 ret .size _vpaes_encrypt_core,.-_vpaes_encrypt_core .globl vpaes_encrypt .hidden vpaes_encrypt .type vpaes_encrypt,%function .align 4 vpaes_encrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#4] // kFlag_vpaes_encrypt #endif AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ld1 {v7.16b}, [x0] bl _vpaes_encrypt_preheat bl _vpaes_encrypt_core st1 {v0.16b}, [x1] ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_encrypt,.-vpaes_encrypt .type _vpaes_encrypt_2x,%function .align 4 _vpaes_encrypt_2x: mov x9, x2 ldr w8, [x2,#240] // pull rounds adrp x11, .Lk_mc_forward+16 add x11, x11, :lo12:.Lk_mc_forward+16 // vmovdqa .Lk_ipt(%rip), %xmm2 # iptlo ld1 {v16.2d}, [x9], #16 // vmovdqu (%r9), %xmm5 # round0 key and v1.16b, v14.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v14.16b, #4 // vpsrlb $4, %xmm0, %xmm0 and v9.16b, v15.16b, v17.16b ushr v8.16b, v15.16b, #4 tbl v1.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm1 tbl v9.16b, {v20.16b}, v9.16b // vmovdqa .Lk_ipt+16(%rip), %xmm3 # ipthi tbl v2.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm3, %xmm2 tbl v10.16b, {v21.16b}, v8.16b eor v0.16b, v1.16b, v16.16b // vpxor %xmm5, %xmm1, %xmm0 eor v8.16b, v9.16b, v16.16b eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 eor v8.16b, v8.16b, v10.16b b .Lenc_2x_entry .align 4 .Lenc_2x_loop: // middle of middle round add x10, x11, #0x40 tbl v4.16b, {v25.16b}, v2.16b // vpshufb %xmm2, %xmm13, %xmm4 # 4 = sb1u tbl v12.16b, {v25.16b}, v10.16b ld1 {v1.2d}, [x11], #16 // vmovdqa -0x40(%r11,%r10), %xmm1 # .Lk_mc_forward[] tbl v0.16b, {v24.16b}, v3.16b // vpshufb %xmm3, %xmm12, %xmm0 # 0 = sb1t tbl v8.16b, {v24.16b}, v11.16b eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b tbl v5.16b, {v27.16b}, v2.16b // vpshufb %xmm2, %xmm15, %xmm5 # 4 = sb2u tbl v13.16b, {v27.16b}, v10.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A eor v8.16b, v8.16b, v12.16b tbl v2.16b, {v26.16b}, v3.16b // vpshufb %xmm3, %xmm14, %xmm2 # 2 = sb2t tbl v10.16b, {v26.16b}, v11.16b ld1 {v4.2d}, [x10] // vmovdqa (%r11,%r10), %xmm4 # .Lk_mc_backward[] tbl v3.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm3 # 0 = B tbl v11.16b, {v8.16b}, v1.16b eor v2.16b, v2.16b, v5.16b // vpxor %xmm5, %xmm2, %xmm2 # 2 = 2A eor v10.16b, v10.16b, v13.16b tbl v0.16b, {v0.16b}, v4.16b // vpshufb %xmm4, %xmm0, %xmm0 # 3 = D tbl v8.16b, {v8.16b}, v4.16b eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 0 = 2A+B eor v11.16b, v11.16b, v10.16b tbl v4.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm4 # 0 = 2B+C tbl v12.16b, {v11.16b},v1.16b eor v0.16b, v0.16b, v3.16b // vpxor %xmm3, %xmm0, %xmm0 # 3 = 2A+B+D eor v8.16b, v8.16b, v11.16b and x11, x11, #~(1<<6) // and $0x30, %r11 # ... 
mod 4 eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = 2A+3B+C+D eor v8.16b, v8.16b, v12.16b sub w8, w8, #1 // nr-- .Lenc_2x_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm0, %xmm9, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i and v9.16b, v8.16b, v17.16b ushr v8.16b, v8.16b, #4 tbl v5.16b, {v19.16b},v1.16b // vpshufb %xmm1, %xmm11, %xmm5 # 2 = a/k tbl v13.16b, {v19.16b},v9.16b eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j eor v9.16b, v9.16b, v8.16b tbl v3.16b, {v18.16b},v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v11.16b, {v18.16b},v8.16b tbl v4.16b, {v18.16b},v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j tbl v12.16b, {v18.16b},v9.16b eor v3.16b, v3.16b, v5.16b // vpxor %xmm5, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v11.16b, v11.16b, v13.16b eor v4.16b, v4.16b, v5.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = jak = 1/j + a/k eor v12.16b, v12.16b, v13.16b tbl v2.16b, {v18.16b},v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v10.16b, {v18.16b},v11.16b tbl v3.16b, {v18.16b},v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak tbl v11.16b, {v18.16b},v12.16b eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v10.16b, v10.16b, v9.16b eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo eor v11.16b, v11.16b, v8.16b ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm5 cbnz w8, .Lenc_2x_loop // middle of last round add x10, x11, #0x80 // vmovdqa -0x60(%r10), %xmm4 # 3 : sbou .Lk_sbo // vmovdqa -0x50(%r10), %xmm0 # 0 : sbot .Lk_sbo+16 tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou tbl v12.16b, {v22.16b}, v10.16b ld1 {v1.2d}, [x10] // vmovdqa 0x40(%r11,%r10), %xmm1 # .Lk_sr[] tbl v0.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm0, %xmm0 # 0 = sb1t tbl v8.16b, {v23.16b}, v11.16b eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A eor v8.16b, v8.16b, v12.16b tbl v0.16b, {v0.16b},v1.16b // vpshufb %xmm1, %xmm0, %xmm0 tbl v1.16b, {v8.16b},v1.16b ret .size _vpaes_encrypt_2x,.-_vpaes_encrypt_2x .type _vpaes_decrypt_preheat,%function .align 4 _vpaes_decrypt_preheat: adrp x10, .Lk_inv add x10, x10, :lo12:.Lk_inv movi v17.16b, #0x0f adrp x11, .Lk_dipt add x11, x11, :lo12:.Lk_dipt ld1 {v18.2d,v19.2d}, [x10],#32 // .Lk_inv ld1 {v20.2d,v21.2d,v22.2d,v23.2d}, [x11],#64 // .Lk_dipt, .Lk_dsbo ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x11],#64 // .Lk_dsb9, .Lk_dsbd ld1 {v28.2d,v29.2d,v30.2d,v31.2d}, [x11] // .Lk_dsbb, .Lk_dsbe ret .size _vpaes_decrypt_preheat,.-_vpaes_decrypt_preheat ## ## Decryption core ## ## Same API as encryption core. 
## .type _vpaes_decrypt_core,%function .align 4 _vpaes_decrypt_core: mov x9, x2 ldr w8, [x2,#240] // pull rounds // vmovdqa .Lk_dipt(%rip), %xmm2 # iptlo lsl x11, x8, #4 // mov %rax, %r11; shl $4, %r11 eor x11, x11, #0x30 // xor $0x30, %r11 adrp x10, .Lk_sr add x10, x10, :lo12:.Lk_sr and x11, x11, #0x30 // and $0x30, %r11 add x11, x11, x10 adrp x10, .Lk_mc_forward+48 add x10, x10, :lo12:.Lk_mc_forward+48 ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm4 # round0 key and v1.16b, v7.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v7.16b, #4 // vpsrlb $4, %xmm0, %xmm0 tbl v2.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm2 ld1 {v5.2d}, [x10] // vmovdqa .Lk_mc_forward+48(%rip), %xmm5 // vmovdqa .Lk_dipt+16(%rip), %xmm1 # ipthi tbl v0.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm1, %xmm0 eor v2.16b, v2.16b, v16.16b // vpxor %xmm4, %xmm2, %xmm2 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 b .Ldec_entry .align 4 .Ldec_loop: // // Inverse mix columns // // vmovdqa -0x20(%r10),%xmm4 # 4 : sb9u // vmovdqa -0x10(%r10),%xmm1 # 0 : sb9t tbl v4.16b, {v24.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sb9u tbl v1.16b, {v25.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb9t eor v0.16b, v4.16b, v16.16b // vpxor %xmm4, %xmm0, %xmm0 // vmovdqa 0x00(%r10),%xmm4 # 4 : sbdu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x10(%r10),%xmm1 # 0 : sbdt tbl v4.16b, {v26.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbdu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v27.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbdt eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch // vmovdqa 0x20(%r10), %xmm4 # 4 : sbbu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x30(%r10), %xmm1 # 0 : sbbt tbl v4.16b, {v28.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbbu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v29.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbbt eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch // vmovdqa 0x40(%r10), %xmm4 # 4 : sbeu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x50(%r10), %xmm1 # 0 : sbet tbl v4.16b, {v30.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbeu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v31.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbet eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch ext v5.16b, v5.16b, v5.16b, #12 // vpalignr $12, %xmm5, %xmm5, %xmm5 eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch sub w8, w8, #1 // sub $1,%rax # nr-- .Ldec_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i tbl v2.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v3.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 
# 3 = jo ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm0 cbnz w8, .Ldec_loop // middle of last round // vmovdqa 0x60(%r10), %xmm4 # 3 : sbou tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou // vmovdqa 0x70(%r10), %xmm1 # 0 : sbot ld1 {v2.2d}, [x11] // vmovdqa -0x160(%r11), %xmm2 # .Lk_sr-.Lk_dsbd=-0x160 tbl v1.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm0, %xmm4, %xmm4 # 4 = sb1u + k eor v0.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm0 # 0 = A tbl v0.16b, {v0.16b}, v2.16b // vpshufb %xmm2, %xmm0, %xmm0 ret .size _vpaes_decrypt_core,.-_vpaes_decrypt_core .globl vpaes_decrypt .hidden vpaes_decrypt .type vpaes_decrypt,%function .align 4 vpaes_decrypt: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ld1 {v7.16b}, [x0] bl _vpaes_decrypt_preheat bl _vpaes_decrypt_core st1 {v0.16b}, [x1] ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_decrypt,.-vpaes_decrypt // v14-v15 input, v0-v1 output .type _vpaes_decrypt_2x,%function .align 4 _vpaes_decrypt_2x: mov x9, x2 ldr w8, [x2,#240] // pull rounds // vmovdqa .Lk_dipt(%rip), %xmm2 # iptlo lsl x11, x8, #4 // mov %rax, %r11; shl $4, %r11 eor x11, x11, #0x30 // xor $0x30, %r11 adrp x10, .Lk_sr add x10, x10, :lo12:.Lk_sr and x11, x11, #0x30 // and $0x30, %r11 add x11, x11, x10 adrp x10, .Lk_mc_forward+48 add x10, x10, :lo12:.Lk_mc_forward+48 ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm4 # round0 key and v1.16b, v14.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v14.16b, #4 // vpsrlb $4, %xmm0, %xmm0 and v9.16b, v15.16b, v17.16b ushr v8.16b, v15.16b, #4 tbl v2.16b, {v20.16b},v1.16b // vpshufb %xmm1, %xmm2, %xmm2 tbl v10.16b, {v20.16b},v9.16b ld1 {v5.2d}, [x10] // vmovdqa .Lk_mc_forward+48(%rip), %xmm5 // vmovdqa .Lk_dipt+16(%rip), %xmm1 # ipthi tbl v0.16b, {v21.16b},v0.16b // vpshufb %xmm0, %xmm1, %xmm0 tbl v8.16b, {v21.16b},v8.16b eor v2.16b, v2.16b, v16.16b // vpxor %xmm4, %xmm2, %xmm2 eor v10.16b, v10.16b, v16.16b eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 eor v8.16b, v8.16b, v10.16b b .Ldec_2x_entry .align 4 .Ldec_2x_loop: // // Inverse mix columns // // vmovdqa -0x20(%r10),%xmm4 # 4 : sb9u // vmovdqa -0x10(%r10),%xmm1 # 0 : sb9t tbl v4.16b, {v24.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sb9u tbl v12.16b, {v24.16b}, v10.16b tbl v1.16b, {v25.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb9t tbl v9.16b, {v25.16b}, v11.16b eor v0.16b, v4.16b, v16.16b // vpxor %xmm4, %xmm0, %xmm0 eor v8.16b, v12.16b, v16.16b // vmovdqa 0x00(%r10),%xmm4 # 4 : sbdu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x10(%r10),%xmm1 # 0 : sbdt tbl v4.16b, {v26.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbdu tbl v12.16b, {v26.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v27.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbdt tbl v9.16b, {v27.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b // vmovdqa 0x20(%r10), %xmm4 # 4 : sbbu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vmovdqa 0x30(%r10), %xmm1 # 0 : sbbt tbl v4.16b, {v28.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbbu tbl v12.16b, {v28.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v29.16b}, v3.16b // 
vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbbt tbl v9.16b, {v29.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b // vmovdqa 0x40(%r10), %xmm4 # 4 : sbeu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vmovdqa 0x50(%r10), %xmm1 # 0 : sbet tbl v4.16b, {v30.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbeu tbl v12.16b, {v30.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v31.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbet tbl v9.16b, {v31.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b ext v5.16b, v5.16b, v5.16b, #12 // vpalignr $12, %xmm5, %xmm5, %xmm5 eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b sub w8, w8, #1 // sub $1,%rax # nr-- .Ldec_2x_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i and v9.16b, v8.16b, v17.16b ushr v8.16b, v8.16b, #4 tbl v2.16b, {v19.16b},v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k tbl v10.16b, {v19.16b},v9.16b eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j eor v9.16b, v9.16b, v8.16b tbl v3.16b, {v18.16b},v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v11.16b, {v18.16b},v8.16b tbl v4.16b, {v18.16b},v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j tbl v12.16b, {v18.16b},v9.16b eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v11.16b, v11.16b, v10.16b eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k eor v12.16b, v12.16b, v10.16b tbl v2.16b, {v18.16b},v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v10.16b, {v18.16b},v11.16b tbl v3.16b, {v18.16b},v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak tbl v11.16b, {v18.16b},v12.16b eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v10.16b, v10.16b, v9.16b eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo eor v11.16b, v11.16b, v8.16b ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm0 cbnz w8, .Ldec_2x_loop // middle of last round // vmovdqa 0x60(%r10), %xmm4 # 3 : sbou tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou tbl v12.16b, {v22.16b}, v10.16b // vmovdqa 0x70(%r10), %xmm1 # 0 : sbot tbl v1.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb1t tbl v9.16b, {v23.16b}, v11.16b ld1 {v2.2d}, [x11] // vmovdqa -0x160(%r11), %xmm2 # .Lk_sr-.Lk_dsbd=-0x160 eor v4.16b, v4.16b, v16.16b // vpxor %xmm0, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b eor v0.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm0 # 0 = A eor v8.16b, v9.16b, v12.16b tbl v0.16b, {v0.16b},v2.16b // vpshufb %xmm2, %xmm0, %xmm0 tbl v1.16b, {v8.16b},v2.16b ret .size _vpaes_decrypt_2x,.-_vpaes_decrypt_2x ######################################################## ## ## ## AES key schedule ## ## ## ######################################################## .type _vpaes_key_preheat,%function .align 4 _vpaes_key_preheat: adrp x10, .Lk_inv add x10, x10, :lo12:.Lk_inv movi v16.16b, #0x5b // .Lk_s63 adrp x11, .Lk_sb1 add x11, x11, :lo12:.Lk_sb1 movi v17.16b, #0x0f // .Lk_s0F ld1 {v18.2d,v19.2d,v20.2d,v21.2d}, [x10] // .Lk_inv, .Lk_ipt adrp x10, .Lk_dksd add x10, x10, :lo12:.Lk_dksd ld1 {v22.2d,v23.2d}, [x11] // .Lk_sb1 adrp x11, .Lk_mc_forward add x11, x11, :lo12:.Lk_mc_forward ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x10],#64 // 
.Lk_dksd, .Lk_dksb ld1 {v28.2d,v29.2d,v30.2d,v31.2d}, [x10],#64 // .Lk_dkse, .Lk_dks9 ld1 {v8.2d}, [x10] // .Lk_rcon ld1 {v9.2d}, [x11] // .Lk_mc_forward[0] ret .size _vpaes_key_preheat,.-_vpaes_key_preheat .type _vpaes_schedule_core,%function .align 4 _vpaes_schedule_core: AARCH64_SIGN_LINK_REGISTER stp x29, x30, [sp,#-16]! add x29,sp,#0 bl _vpaes_key_preheat // load the tables ld1 {v0.16b}, [x0],#16 // vmovdqu (%rdi), %xmm0 # load key (unaligned) // input transform mov v3.16b, v0.16b // vmovdqa %xmm0, %xmm3 bl _vpaes_schedule_transform mov v7.16b, v0.16b // vmovdqa %xmm0, %xmm7 adrp x10, .Lk_sr // lea .Lk_sr(%rip),%r10 add x10, x10, :lo12:.Lk_sr add x8, x8, x10 cbnz w3, .Lschedule_am_decrypting // encrypting, output zeroth round key after transform st1 {v0.2d}, [x2] // vmovdqu %xmm0, (%rdx) b .Lschedule_go .Lschedule_am_decrypting: // decrypting, output zeroth round key after shiftrows ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 tbl v3.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 st1 {v3.2d}, [x2] // vmovdqu %xmm3, (%rdx) eor x8, x8, #0x30 // xor $0x30, %r8 .Lschedule_go: cmp w1, #192 // cmp $192, %esi b.hi .Lschedule_256 b.eq .Lschedule_192 // 128: fall though ## ## .schedule_128 ## ## 128-bit specific part of key schedule. ## ## This schedule is really simple, because all its parts ## are accomplished by the subroutines. ## .Lschedule_128: mov x0, #10 // mov $10, %esi .Loop_schedule_128: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_round cbz x0, .Lschedule_mangle_last bl _vpaes_schedule_mangle // write output b .Loop_schedule_128 ## ## .aes_schedule_192 ## ## 192-bit specific part of key schedule. ## ## The main body of this schedule is the same as the 128-bit ## schedule, but with more smearing. The long, high side is ## stored in %xmm7 as before, and the short, low side is in ## the high bits of %xmm6. ## ## This schedule is somewhat nastier, however, because each ## round produces 192 bits of key material, or 1.5 round keys. ## Therefore, on each cycle we do 2 rounds and produce 3 round ## keys. ## .align 4 .Lschedule_192: sub x0, x0, #8 ld1 {v0.16b}, [x0] // vmovdqu 8(%rdi),%xmm0 # load key part 2 (very unaligned) bl _vpaes_schedule_transform // input transform mov v6.16b, v0.16b // vmovdqa %xmm0, %xmm6 # save short part eor v4.16b, v4.16b, v4.16b // vpxor %xmm4, %xmm4, %xmm4 # clear 4 ins v6.d[0], v4.d[0] // vmovhlps %xmm4, %xmm6, %xmm6 # clobber low side with zeros mov x0, #4 // mov $4, %esi .Loop_schedule_192: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_round ext v0.16b, v6.16b, v0.16b, #8 // vpalignr $8,%xmm6,%xmm0,%xmm0 bl _vpaes_schedule_mangle // save key n bl _vpaes_schedule_192_smear bl _vpaes_schedule_mangle // save key n+1 bl _vpaes_schedule_round cbz x0, .Lschedule_mangle_last bl _vpaes_schedule_mangle // save key n+2 bl _vpaes_schedule_192_smear b .Loop_schedule_192 ## ## .aes_schedule_256 ## ## 256-bit specific part of key schedule. ## ## The structure here is very similar to the 128-bit ## schedule, but with an additional "low side" in ## %xmm6. The low side's rounds are the same as the ## high side's, except no rcon and no rotation. 
## .align 4 .Lschedule_256: ld1 {v0.16b}, [x0] // vmovdqu 16(%rdi),%xmm0 # load key part 2 (unaligned) bl _vpaes_schedule_transform // input transform mov x0, #7 // mov $7, %esi .Loop_schedule_256: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_mangle // output low result mov v6.16b, v0.16b // vmovdqa %xmm0, %xmm6 # save cur_lo in xmm6 // high round bl _vpaes_schedule_round cbz x0, .Lschedule_mangle_last bl _vpaes_schedule_mangle // low round. swap xmm7 and xmm6 dup v0.4s, v0.s[3] // vpshufd $0xFF, %xmm0, %xmm0 movi v4.16b, #0 mov v5.16b, v7.16b // vmovdqa %xmm7, %xmm5 mov v7.16b, v6.16b // vmovdqa %xmm6, %xmm7 bl _vpaes_schedule_low_round mov v7.16b, v5.16b // vmovdqa %xmm5, %xmm7 b .Loop_schedule_256 ## ## .aes_schedule_mangle_last ## ## Mangler for last round of key schedule ## Mangles %xmm0 ## when encrypting, outputs out(%xmm0) ^ 63 ## when decrypting, outputs unskew(%xmm0) ## ## Always called right before return... jumps to cleanup and exits ## .align 4 .Lschedule_mangle_last: // schedule last round key from xmm0 adrp x11, .Lk_deskew // lea .Lk_deskew(%rip),%r11 # prepare to deskew add x11, x11, :lo12:.Lk_deskew cbnz w3, .Lschedule_mangle_last_dec // encrypting ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10),%xmm1 adrp x11, .Lk_opt // lea .Lk_opt(%rip), %r11 # prepare to output transform add x11, x11, :lo12:.Lk_opt add x2, x2, #32 // add $32, %rdx tbl v0.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm0 # output permute .Lschedule_mangle_last_dec: ld1 {v20.2d,v21.2d}, [x11] // reload constants sub x2, x2, #16 // add $-16, %rdx eor v0.16b, v0.16b, v16.16b // vpxor .Lk_s63(%rip), %xmm0, %xmm0 bl _vpaes_schedule_transform // output transform st1 {v0.2d}, [x2] // vmovdqu %xmm0, (%rdx) # save last key // cleanup eor v0.16b, v0.16b, v0.16b // vpxor %xmm0, %xmm0, %xmm0 eor v1.16b, v1.16b, v1.16b // vpxor %xmm1, %xmm1, %xmm1 eor v2.16b, v2.16b, v2.16b // vpxor %xmm2, %xmm2, %xmm2 eor v3.16b, v3.16b, v3.16b // vpxor %xmm3, %xmm3, %xmm3 eor v4.16b, v4.16b, v4.16b // vpxor %xmm4, %xmm4, %xmm4 eor v5.16b, v5.16b, v5.16b // vpxor %xmm5, %xmm5, %xmm5 eor v6.16b, v6.16b, v6.16b // vpxor %xmm6, %xmm6, %xmm6 eor v7.16b, v7.16b, v7.16b // vpxor %xmm7, %xmm7, %xmm7 ldp x29, x30, [sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size _vpaes_schedule_core,.-_vpaes_schedule_core ## ## .aes_schedule_192_smear ## ## Smear the short, low side in the 192-bit key schedule. ## ## Inputs: ## %xmm7: high side, b a x y ## %xmm6: low side, d c 0 0 ## %xmm13: 0 ## ## Outputs: ## %xmm6: b+c+d b+c 0 0 ## %xmm0: b+c+d b+c b a ## .type _vpaes_schedule_192_smear,%function .align 4 _vpaes_schedule_192_smear: movi v1.16b, #0 dup v0.4s, v7.s[3] ins v1.s[3], v6.s[2] // vpshufd $0x80, %xmm6, %xmm1 # d c 0 0 -> c 0 0 0 ins v0.s[0], v7.s[2] // vpshufd $0xFE, %xmm7, %xmm0 # b a _ _ -> b b b a eor v6.16b, v6.16b, v1.16b // vpxor %xmm1, %xmm6, %xmm6 # -> c+d c 0 0 eor v1.16b, v1.16b, v1.16b // vpxor %xmm1, %xmm1, %xmm1 eor v6.16b, v6.16b, v0.16b // vpxor %xmm0, %xmm6, %xmm6 # -> b+c+d b+c b a mov v0.16b, v6.16b // vmovdqa %xmm6, %xmm0 ins v6.d[0], v1.d[0] // vmovhlps %xmm1, %xmm6, %xmm6 # clobber low side with zeros ret .size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear ## ## .aes_schedule_round ## ## Runs one main round of the key schedule on %xmm0, %xmm7 ## ## Specifically, runs subbytes on the high dword of %xmm0 ## then rotates it by one byte and xors into the low dword of ## %xmm7. ## ## Adds rcon from low byte of %xmm8, then rotates %xmm8 for ## next rcon. 
## ## Smears the dwords of %xmm7 by xoring the low into the ## second low, result into third, result into highest. ## ## Returns results in %xmm7 = %xmm0. ## Clobbers %xmm1-%xmm4, %r11. ## .type _vpaes_schedule_round,%function .align 4 _vpaes_schedule_round: // extract rcon from xmm8 movi v4.16b, #0 // vpxor %xmm4, %xmm4, %xmm4 ext v1.16b, v8.16b, v4.16b, #15 // vpalignr $15, %xmm8, %xmm4, %xmm1 ext v8.16b, v8.16b, v8.16b, #15 // vpalignr $15, %xmm8, %xmm8, %xmm8 eor v7.16b, v7.16b, v1.16b // vpxor %xmm1, %xmm7, %xmm7 // rotate dup v0.4s, v0.s[3] // vpshufd $0xFF, %xmm0, %xmm0 ext v0.16b, v0.16b, v0.16b, #1 // vpalignr $1, %xmm0, %xmm0, %xmm0 // fall through... // low round: same as high round, but no rotation and no rcon. _vpaes_schedule_low_round: // smear xmm7 ext v1.16b, v4.16b, v7.16b, #12 // vpslldq $4, %xmm7, %xmm1 eor v7.16b, v7.16b, v1.16b // vpxor %xmm1, %xmm7, %xmm7 ext v4.16b, v4.16b, v7.16b, #8 // vpslldq $8, %xmm7, %xmm4 // subbytes and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i eor v7.16b, v7.16b, v4.16b // vpxor %xmm4, %xmm7, %xmm7 tbl v2.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v7.16b, v7.16b, v16.16b // vpxor .Lk_s63(%rip), %xmm7, %xmm7 tbl v3.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm3 # 2 = 1/iak eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm2 # 3 = 1/jak eor v3.16b, v3.16b, v1.16b // vpxor %xmm1, %xmm3, %xmm3 # 2 = io eor v2.16b, v2.16b, v0.16b // vpxor %xmm0, %xmm2, %xmm2 # 3 = jo tbl v4.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm13, %xmm4 # 4 = sbou tbl v1.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm12, %xmm1 # 0 = sb1t eor v1.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm1 # 0 = sbox output // add in smeared stuff eor v0.16b, v1.16b, v7.16b // vpxor %xmm7, %xmm1, %xmm0 eor v7.16b, v1.16b, v7.16b // vmovdqa %xmm0, %xmm7 ret .size _vpaes_schedule_round,.-_vpaes_schedule_round ## ## .aes_schedule_transform ## ## Linear-transform %xmm0 according to tables at (%r11) ## ## Requires that %xmm9 = 0x0F0F... as in preheat ## Output in %xmm0 ## Clobbers %xmm1, %xmm2 ## .type _vpaes_schedule_transform,%function .align 4 _vpaes_schedule_transform: and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 // vmovdqa (%r11), %xmm2 # lo tbl v2.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm2 // vmovdqa 16(%r11), %xmm1 # hi tbl v0.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm1, %xmm0 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 ret .size _vpaes_schedule_transform,.-_vpaes_schedule_transform ## ## .aes_schedule_mangle ## ## Mangle xmm0 from (basis-transformed) standard version ## to our version. 
## ## On encrypt, ## xor with 0x63 ## multiply by circulant 0,1,1,1 ## apply shiftrows transform ## ## On decrypt, ## xor with 0x63 ## multiply by "inverse mixcolumns" circulant E,B,D,9 ## deskew ## apply shiftrows transform ## ## ## Writes out to (%rdx), and increments or decrements it ## Keeps track of round number mod 4 in %r8 ## Preserves xmm0 ## Clobbers xmm1-xmm5 ## .type _vpaes_schedule_mangle,%function .align 4 _vpaes_schedule_mangle: mov v4.16b, v0.16b // vmovdqa %xmm0, %xmm4 # save xmm0 for later // vmovdqa .Lk_mc_forward(%rip),%xmm5 cbnz w3, .Lschedule_mangle_dec // encrypting eor v4.16b, v0.16b, v16.16b // vpxor .Lk_s63(%rip), %xmm0, %xmm4 add x2, x2, #16 // add $16, %rdx tbl v4.16b, {v4.16b}, v9.16b // vpshufb %xmm5, %xmm4, %xmm4 tbl v1.16b, {v4.16b}, v9.16b // vpshufb %xmm5, %xmm4, %xmm1 tbl v3.16b, {v1.16b}, v9.16b // vpshufb %xmm5, %xmm1, %xmm3 eor v4.16b, v4.16b, v1.16b // vpxor %xmm1, %xmm4, %xmm4 ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 eor v3.16b, v3.16b, v4.16b // vpxor %xmm4, %xmm3, %xmm3 b .Lschedule_mangle_both .align 4 .Lschedule_mangle_dec: // inverse mix columns // lea .Lk_dksd(%rip),%r11 ushr v1.16b, v4.16b, #4 // vpsrlb $4, %xmm4, %xmm1 # 1 = hi and v4.16b, v4.16b, v17.16b // vpand %xmm9, %xmm4, %xmm4 # 4 = lo // vmovdqa 0x00(%r11), %xmm2 tbl v2.16b, {v24.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 // vmovdqa 0x10(%r11), %xmm3 tbl v3.16b, {v25.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x20(%r11), %xmm2 tbl v2.16b, {v26.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 // vmovdqa 0x30(%r11), %xmm3 tbl v3.16b, {v27.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x40(%r11), %xmm2 tbl v2.16b, {v28.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 // vmovdqa 0x50(%r11), %xmm3 tbl v3.16b, {v29.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 // vmovdqa 0x60(%r11), %xmm2 tbl v2.16b, {v30.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x70(%r11), %xmm4 tbl v4.16b, {v31.16b}, v1.16b // vpshufb %xmm1, %xmm4, %xmm4 ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 eor v3.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm3 sub x2, x2, #16 // add $-16, %rdx .Lschedule_mangle_both: tbl v3.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 add x8, x8, #48 // add $-16, %r8 and x8, x8, #~(1<<6) // and $0x30, %r8 st1 {v3.2d}, [x2] // vmovdqu %xmm3, (%rdx) ret .size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle .globl vpaes_set_encrypt_key .hidden vpaes_set_encrypt_key .type vpaes_set_encrypt_key,%function .align 4 vpaes_set_encrypt_key: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#5] // kFlag_vpaes_set_encrypt_key #endif AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! 
// ABI spec says so lsr w9, w1, #5 // shr $5,%eax add w9, w9, #5 // $5,%eax str w9, [x2,#240] // mov %eax,240(%rdx) # AES_KEY->rounds = nbits/32+5; mov w3, #0 // mov $0,%ecx mov x8, #0x30 // mov $0x30,%r8d bl _vpaes_schedule_core eor x0, x0, x0 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key .globl vpaes_set_decrypt_key .hidden vpaes_set_decrypt_key .type vpaes_set_decrypt_key,%function .align 4 vpaes_set_decrypt_key: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so lsr w9, w1, #5 // shr $5,%eax add w9, w9, #5 // $5,%eax str w9, [x2,#240] // mov %eax,240(%rdx) # AES_KEY->rounds = nbits/32+5; lsl w9, w9, #4 // shl $4,%eax add x2, x2, #16 // lea 16(%rdx,%rax),%rdx add x2, x2, x9 mov w3, #1 // mov $1,%ecx lsr w8, w1, #1 // shr $1,%r8d and x8, x8, #32 // and $32,%r8d eor x8, x8, #32 // xor $32,%r8d # nbits==192?0:32 bl _vpaes_schedule_core ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key .globl vpaes_cbc_encrypt .hidden vpaes_cbc_encrypt .type vpaes_cbc_encrypt,%function .align 4 vpaes_cbc_encrypt: AARCH64_SIGN_LINK_REGISTER cbz x2, .Lcbc_abort cmp w5, #0 // check direction b.eq vpaes_cbc_decrypt stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x17, x2 // reassign mov x2, x3 // reassign ld1 {v0.16b}, [x4] // load ivec bl _vpaes_encrypt_preheat b .Lcbc_enc_loop .align 4 .Lcbc_enc_loop: ld1 {v7.16b}, [x0],#16 // load input eor v7.16b, v7.16b, v0.16b // xor with ivec bl _vpaes_encrypt_core st1 {v0.16b}, [x1],#16 // save output subs x17, x17, #16 b.hi .Lcbc_enc_loop st1 {v0.16b}, [x4] // write ivec ldp x29,x30,[sp],#16 .Lcbc_abort: AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt .type vpaes_cbc_decrypt,%function .align 4 vpaes_cbc_decrypt: // Not adding AARCH64_SIGN_LINK_REGISTER here because vpaes_cbc_decrypt is jumped to // only from vpaes_cbc_encrypt which has already signed the return address. stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so stp d10,d11,[sp,#-16]! stp d12,d13,[sp,#-16]! stp d14,d15,[sp,#-16]! mov x17, x2 // reassign mov x2, x3 // reassign ld1 {v6.16b}, [x4] // load ivec bl _vpaes_decrypt_preheat tst x17, #16 b.eq .Lcbc_dec_loop2x ld1 {v7.16b}, [x0], #16 // load input bl _vpaes_decrypt_core eor v0.16b, v0.16b, v6.16b // xor with ivec orr v6.16b, v7.16b, v7.16b // next ivec value st1 {v0.16b}, [x1], #16 subs x17, x17, #16 b.ls .Lcbc_dec_done .align 4 .Lcbc_dec_loop2x: ld1 {v14.16b,v15.16b}, [x0], #32 bl _vpaes_decrypt_2x eor v0.16b, v0.16b, v6.16b // xor with ivec eor v1.16b, v1.16b, v14.16b orr v6.16b, v15.16b, v15.16b st1 {v0.16b,v1.16b}, [x1], #32 subs x17, x17, #32 b.hi .Lcbc_dec_loop2x .Lcbc_dec_done: st1 {v6.16b}, [x4] ldp d14,d15,[sp],#16 ldp d12,d13,[sp],#16 ldp d10,d11,[sp],#16 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_cbc_decrypt,.-vpaes_cbc_decrypt .globl vpaes_ctr32_encrypt_blocks .hidden vpaes_ctr32_encrypt_blocks .type vpaes_ctr32_encrypt_blocks,%function .align 4 vpaes_ctr32_encrypt_blocks: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so stp d10,d11,[sp,#-16]! stp d12,d13,[sp,#-16]! stp d14,d15,[sp,#-16]! cbz x2, .Lctr32_done // Note, unlike the other functions, x2 here is measured in blocks, // not bytes. mov x17, x2 mov x2, x3 // Load the IV and counter portion. 
ldr w6, [x4, #12] ld1 {v7.16b}, [x4] bl _vpaes_encrypt_preheat tst x17, #1 rev w6, w6 // The counter is big-endian. b.eq .Lctr32_prep_loop // Handle one block so the remaining block count is even for // _vpaes_encrypt_2x. ld1 {v6.16b}, [x0], #16 // .Load input ahead of time bl _vpaes_encrypt_core eor v0.16b, v0.16b, v6.16b // XOR input and result st1 {v0.16b}, [x1], #16 subs x17, x17, #1 // Update the counter. add w6, w6, #1 rev w7, w6 mov v7.s[3], w7 b.ls .Lctr32_done .Lctr32_prep_loop: // _vpaes_encrypt_core takes its input from v7, while _vpaes_encrypt_2x // uses v14 and v15. mov v15.16b, v7.16b mov v14.16b, v7.16b add w6, w6, #1 rev w7, w6 mov v15.s[3], w7 .Lctr32_loop: ld1 {v6.16b,v7.16b}, [x0], #32 // .Load input ahead of time bl _vpaes_encrypt_2x eor v0.16b, v0.16b, v6.16b // XOR input and result eor v1.16b, v1.16b, v7.16b // XOR input and result (#2) st1 {v0.16b,v1.16b}, [x1], #32 subs x17, x17, #2 // Update the counter. add w7, w6, #1 add w6, w6, #2 rev w7, w7 mov v14.s[3], w7 rev w7, w6 mov v15.s[3], w7 b.hi .Lctr32_loop .Lctr32_done: ldp d14,d15,[sp],#16 ldp d12,d13,[sp],#16 ldp d10,d11,[sp],#16 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size vpaes_ctr32_encrypt_blocks,.-vpaes_ctr32_encrypt_blocks #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
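The vpaes_ctr32_encrypt_blocks routine above counts its length in 16-byte blocks and keeps a big-endian 32-bit block counter in the last word of the IV, peeling off a single block when the count is odd so the remainder can run through _vpaes_encrypt_2x in pairs. Below is a minimal portable C sketch of that CTR32 loop — a reference for the behaviour, not the generated code; encrypt_block is a hypothetical single-block AES callback standing in for the vpaes rounds.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

typedef void (*aes_encrypt_block)(uint8_t out[16], const uint8_t in[16],
                                  const void *key);

/* CTR32: length measured in 16-byte blocks, big-endian counter in IV[12..15]. */
static void ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
                                 size_t blocks, const void *key,
                                 const uint8_t ivec[16],
                                 aes_encrypt_block encrypt_block) {
  uint8_t counter[16];
  memcpy(counter, ivec, 16);
  uint32_t ctr = ((uint32_t)counter[12] << 24) | ((uint32_t)counter[13] << 16) |
                 ((uint32_t)counter[14] << 8) | (uint32_t)counter[15];
  for (size_t i = 0; i < blocks; i++) {
    uint8_t keystream[16];
    encrypt_block(keystream, counter, key);          /* E_k(counter block)   */
    for (int j = 0; j < 16; j++) {
      out[16 * i + j] = in[16 * i + j] ^ keystream[j];
    }
    ctr++;                  /* wraps mod 2^32; upper 96 bits of IV untouched */
    counter[12] = (uint8_t)(ctr >> 24);
    counter[13] = (uint8_t)(ctr >> 16);
    counter[14] = (uint8_t)(ctr >> 8);
    counter[15] = (uint8_t)ctr;
  }
}

The _vpaes_encrypt_2x pairing in the assembly only changes how many keystream blocks are produced per loop iteration; the counter arithmetic is the same as in this sketch.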
marvin-hansen/iggy-streaming-system
11,053
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/ghash-neon-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text .globl gcm_init_neon .hidden gcm_init_neon .type gcm_init_neon,%function .align 4 gcm_init_neon: AARCH64_VALID_CALL_TARGET // This function is adapted from gcm_init_v8. xC2 is t3. ld1 {v17.2d}, [x1] // load H movi v19.16b, #0xe1 shl v19.2d, v19.2d, #57 // 0xc2.0 ext v3.16b, v17.16b, v17.16b, #8 ushr v18.2d, v19.2d, #63 dup v17.4s, v17.s[1] ext v16.16b, v18.16b, v19.16b, #8 // t0=0xc2....01 ushr v18.2d, v3.2d, #63 sshr v17.4s, v17.4s, #31 // broadcast carry bit and v18.16b, v18.16b, v16.16b shl v3.2d, v3.2d, #1 ext v18.16b, v18.16b, v18.16b, #8 and v16.16b, v16.16b, v17.16b orr v3.16b, v3.16b, v18.16b // H<<<=1 eor v5.16b, v3.16b, v16.16b // twisted H st1 {v5.2d}, [x0] // store Htable[0] ret .size gcm_init_neon,.-gcm_init_neon .globl gcm_gmult_neon .hidden gcm_gmult_neon .type gcm_gmult_neon,%function .align 4 gcm_gmult_neon: AARCH64_VALID_CALL_TARGET ld1 {v3.16b}, [x0] // load Xi ld1 {v5.1d}, [x1], #8 // load twisted H ld1 {v6.1d}, [x1] adrp x9, .Lmasks // load constants add x9, x9, :lo12:.Lmasks ld1 {v24.2d, v25.2d}, [x9] rev64 v3.16b, v3.16b // byteswap Xi ext v3.16b, v3.16b, v3.16b, #8 eor v7.8b, v5.8b, v6.8b // Karatsuba pre-processing mov x3, #16 b .Lgmult_neon .size gcm_gmult_neon,.-gcm_gmult_neon .globl gcm_ghash_neon .hidden gcm_ghash_neon .type gcm_ghash_neon,%function .align 4 gcm_ghash_neon: AARCH64_VALID_CALL_TARGET ld1 {v0.16b}, [x0] // load Xi ld1 {v5.1d}, [x1], #8 // load twisted H ld1 {v6.1d}, [x1] adrp x9, .Lmasks // load constants add x9, x9, :lo12:.Lmasks ld1 {v24.2d, v25.2d}, [x9] rev64 v0.16b, v0.16b // byteswap Xi ext v0.16b, v0.16b, v0.16b, #8 eor v7.8b, v5.8b, v6.8b // Karatsuba pre-processing .Loop_neon: ld1 {v3.16b}, [x2], #16 // load inp rev64 v3.16b, v3.16b // byteswap inp ext v3.16b, v3.16b, v3.16b, #8 eor v3.16b, v3.16b, v0.16b // inp ^= Xi .Lgmult_neon: // Split the input into v3 and v4. (The upper halves are unused, // so it is okay to leave them alone.) ins v4.d[0], v3.d[1] ext v16.8b, v5.8b, v5.8b, #1 // A1 pmull v16.8h, v16.8b, v3.8b // F = A1*B ext v0.8b, v3.8b, v3.8b, #1 // B1 pmull v0.8h, v5.8b, v0.8b // E = A*B1 ext v17.8b, v5.8b, v5.8b, #2 // A2 pmull v17.8h, v17.8b, v3.8b // H = A2*B ext v19.8b, v3.8b, v3.8b, #2 // B2 pmull v19.8h, v5.8b, v19.8b // G = A*B2 ext v18.8b, v5.8b, v5.8b, #3 // A3 eor v16.16b, v16.16b, v0.16b // L = E + F pmull v18.8h, v18.8b, v3.8b // J = A3*B ext v0.8b, v3.8b, v3.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v0.8h, v5.8b, v0.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. 
ext v19.8b, v3.8b, v3.8b, #4 // B4 eor v18.16b, v18.16b, v0.16b // N = I + J pmull v19.8h, v5.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. For now, we just // pair up independent instructions. zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v0.8h, v5.8b, v3.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v0.16b, v0.16b, v16.16b eor v0.16b, v0.16b, v18.16b eor v3.8b, v3.8b, v4.8b // Karatsuba pre-processing ext v16.8b, v7.8b, v7.8b, #1 // A1 pmull v16.8h, v16.8b, v3.8b // F = A1*B ext v1.8b, v3.8b, v3.8b, #1 // B1 pmull v1.8h, v7.8b, v1.8b // E = A*B1 ext v17.8b, v7.8b, v7.8b, #2 // A2 pmull v17.8h, v17.8b, v3.8b // H = A2*B ext v19.8b, v3.8b, v3.8b, #2 // B2 pmull v19.8h, v7.8b, v19.8b // G = A*B2 ext v18.8b, v7.8b, v7.8b, #3 // A3 eor v16.16b, v16.16b, v1.16b // L = E + F pmull v18.8h, v18.8b, v3.8b // J = A3*B ext v1.8b, v3.8b, v3.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v1.8h, v7.8b, v1.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. ext v19.8b, v3.8b, v3.8b, #4 // B4 eor v18.16b, v18.16b, v1.16b // N = I + J pmull v19.8h, v7.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. For now, we just // pair up independent instructions. 
zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v1.8h, v7.8b, v3.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v1.16b, v1.16b, v16.16b eor v1.16b, v1.16b, v18.16b ext v16.8b, v6.8b, v6.8b, #1 // A1 pmull v16.8h, v16.8b, v4.8b // F = A1*B ext v2.8b, v4.8b, v4.8b, #1 // B1 pmull v2.8h, v6.8b, v2.8b // E = A*B1 ext v17.8b, v6.8b, v6.8b, #2 // A2 pmull v17.8h, v17.8b, v4.8b // H = A2*B ext v19.8b, v4.8b, v4.8b, #2 // B2 pmull v19.8h, v6.8b, v19.8b // G = A*B2 ext v18.8b, v6.8b, v6.8b, #3 // A3 eor v16.16b, v16.16b, v2.16b // L = E + F pmull v18.8h, v18.8b, v4.8b // J = A3*B ext v2.8b, v4.8b, v4.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v2.8h, v6.8b, v2.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. ext v19.8b, v4.8b, v4.8b, #4 // B4 eor v18.16b, v18.16b, v2.16b // N = I + J pmull v19.8h, v6.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. For now, we just // pair up independent instructions. zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v2.8h, v6.8b, v4.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v2.16b, v2.16b, v16.16b eor v2.16b, v2.16b, v18.16b ext v16.16b, v0.16b, v2.16b, #8 eor v1.16b, v1.16b, v0.16b // Karatsuba post-processing eor v1.16b, v1.16b, v2.16b eor v1.16b, v1.16b, v16.16b // Xm overlaps Xh.lo and Xl.hi ins v0.d[1], v1.d[0] // Xh|Xl - 256-bit result // This is a no-op due to the ins instruction below. 
// ins v2.d[0], v1.d[1] // equivalent of reduction_avx from ghash-x86_64.pl shl v17.2d, v0.2d, #57 // 1st phase shl v18.2d, v0.2d, #62 eor v18.16b, v18.16b, v17.16b // shl v17.2d, v0.2d, #63 eor v18.16b, v18.16b, v17.16b // // Note Xm contains {Xl.d[1], Xh.d[0]}. eor v18.16b, v18.16b, v1.16b ins v0.d[1], v18.d[0] // Xl.d[1] ^= t2.d[0] ins v2.d[0], v18.d[1] // Xh.d[0] ^= t2.d[1] ushr v18.2d, v0.2d, #1 // 2nd phase eor v2.16b, v2.16b,v0.16b eor v0.16b, v0.16b,v18.16b // ushr v18.2d, v18.2d, #6 ushr v0.2d, v0.2d, #1 // eor v0.16b, v0.16b, v2.16b // eor v0.16b, v0.16b, v18.16b // subs x3, x3, #16 bne .Loop_neon rev64 v0.16b, v0.16b // byteswap Xi and write ext v0.16b, v0.16b, v0.16b, #8 st1 {v0.16b}, [x0] ret .size gcm_ghash_neon,.-gcm_ghash_neon .section .rodata .align 4 .Lmasks: .quad 0x0000ffffffffffff // k48 .quad 0x00000000ffffffff // k32 .quad 0x000000000000ffff // k16 .quad 0x0000000000000000 // k0 .byte 71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,100,101,114,105,118,101,100,32,102,114,111,109,32,65,82,77,118,52,32,118,101,114,115,105,111,110,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
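The gcm_ghash_neon code above builds each 64x64-bit carryless multiply out of 8-bit pmull products and then combines the three partial products with the Karatsuba identity its comments point at: "Karatsuba pre-processing" XORs the two halves of the twisted H, and "Karatsuba post-processing" folds Xl and Xh back into Xm, which overlaps Xh.lo and Xl.hi. A self-contained C sketch of that combination step follows, using a simple bit-by-bit carryless multiply in place of the pmull chain; the helper names are illustrative, not part of the library.

#include <stdint.h>

typedef struct { uint64_t lo, hi; } u128;

/* Bit-by-bit 64x64 -> 128-bit carryless (GF(2)) multiply, standing in for pmull. */
static u128 clmul128(uint64_t a, uint64_t b) {
  u128 r = {0, 0};
  for (int i = 0; i < 64; i++) {
    if ((b >> i) & 1) {
      r.lo ^= a << i;
      if (i) r.hi ^= a >> (64 - i);
    }
  }
  return r;
}

/* A = a_hi*x^64 + a_lo, B likewise; 256-bit product in out[0] (low) and out[1] (high). */
static void clmul256_karatsuba(u128 out[2], uint64_t a_lo, uint64_t a_hi,
                               uint64_t b_lo, uint64_t b_hi) {
  u128 lo = clmul128(a_lo, b_lo);                  /* Xl */
  u128 hi = clmul128(a_hi, b_hi);                  /* Xh */
  u128 mid = clmul128(a_lo ^ a_hi, b_lo ^ b_hi);   /* "Karatsuba pre-processing" */
  /* "Karatsuba post-processing": Xm ^= Xl ^ Xh (addition in GF(2) is xor). */
  mid.lo ^= lo.lo ^ hi.lo;
  mid.hi ^= lo.hi ^ hi.hi;
  /* Xm overlaps Xh.lo and Xl.hi. */
  out[0].lo = lo.lo;
  out[0].hi = lo.hi ^ mid.lo;
  out[1].lo = hi.lo ^ mid.hi;
  out[1].hi = hi.hi;
}

The shl/ushr sequence that follows in the assembly (the part labelled "equivalent of reduction_avx from ghash-x86_64.pl") then folds this 256-bit product back modulo the GHASH polynomial x^128 + x^7 + x^2 + x + 1; that reduction is not covered by the sketch.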
marvin-hansen/iggy-streaming-system
28,392
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/sha1-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text .globl sha1_block_data_order_nohw .hidden sha1_block_data_order_nohw .type sha1_block_data_order_nohw,%function .align 6 sha1_block_data_order_nohw: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-96]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] ldp w20,w21,[x0] ldp w22,w23,[x0,#8] ldr w24,[x0,#16] .Loop: ldr x3,[x1],#64 movz w28,#0x7999 sub x2,x2,#1 movk w28,#0x5a82,lsl#16 #ifdef __AARCH64EB__ ror x3,x3,#32 #else rev32 x3,x3 #endif add w24,w24,w28 // warm it up add w24,w24,w3 lsr x4,x3,#32 ldr x5,[x1,#-56] bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 add w23,w23,w4 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x5,x5,#32 #else rev32 x5,x5 #endif bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w5 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) lsr x6,x5,#32 ldr x7,[x1,#-48] bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w6 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x7,x7,#32 #else rev32 x7,x7 #endif bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w7 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) lsr x8,x7,#32 ldr x9,[x1,#-40] bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 add w24,w24,w8 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x9,x9,#32 #else rev32 x9,x9 #endif bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 add w23,w23,w9 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) lsr x10,x9,#32 ldr x11,[x1,#-32] bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w10 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x11,x11,#32 #else rev32 x11,x11 #endif bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w11 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) lsr x12,x11,#32 ldr x13,[x1,#-24] bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w12 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x13,x13,#32 #else rev32 x13,x13 #endif bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 add w24,w24,w13 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) lsr x14,x13,#32 ldr x15,[x1,#-16] bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add 
w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 add w23,w23,w14 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x15,x15,#32 #else rev32 x15,x15 #endif bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w15 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) lsr x16,x15,#32 ldr x17,[x1,#-8] bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w16 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x17,x17,#32 #else rev32 x17,x17 #endif bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w17 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) lsr x19,x17,#32 eor w3,w3,w5 bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 eor w3,w3,w11 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) eor w3,w3,w16 ror w22,w22,#2 add w24,w24,w19 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 eor w4,w4,w12 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) eor w4,w4,w17 ror w21,w21,#2 add w23,w23,w3 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 eor w5,w5,w13 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) eor w5,w5,w19 ror w20,w20,#2 add w22,w22,w4 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 eor w6,w6,w14 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) eor w6,w6,w3 ror w24,w24,#2 add w21,w21,w5 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 eor w7,w7,w15 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) eor w7,w7,w4 ror w23,w23,#2 add w20,w20,w6 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w7,w7,#31 movz w28,#0xeba1 movk w28,#0x6ed9,lsl#16 eor w8,w8,w10 bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 eor w8,w8,w16 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) eor w8,w8,w5 ror w22,w22,#2 add w24,w24,w7 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w9,w9,w6 add w23,w23,w8 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w10,w10,w7 add w22,w22,w9 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w11,w11,w8 add w21,w21,w10 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w11,w11,#31 eor w12,w12,w14 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w12,w12,w9 add w20,w20,w11 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w12,w12,#31 eor w13,w13,w15 eor w25,w24,w22 ror w27,w21,#27 add 
w24,w24,w28 // future e+=K eor w13,w13,w5 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w13,w13,w10 add w24,w24,w12 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w13,w13,#31 eor w14,w14,w16 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w14,w14,w6 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w14,w14,w11 add w23,w23,w13 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w14,w14,#31 eor w15,w15,w17 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w15,w15,w7 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w15,w15,w12 add w22,w22,w14 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w15,w15,#31 eor w16,w16,w19 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w16,w16,w8 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w16,w16,w13 add w21,w21,w15 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w17,w17,w14 add w20,w20,w16 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w19,w19,w15 add w24,w24,w17 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w19,w19,#31 eor w3,w3,w5 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w3,w3,w11 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w3,w3,w16 add w23,w23,w19 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w4,w4,w12 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w4,w4,w17 add w22,w22,w3 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w5,w5,w13 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w5,w5,w19 add w21,w21,w4 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w6,w6,w14 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w6,w6,w3 add w20,w20,w5 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w7,w7,w15 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w7,w7,w4 add w24,w24,w6 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w7,w7,#31 eor w8,w8,w10 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w8,w8,w16 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w8,w8,w5 add w23,w23,w7 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w9,w9,w6 add w22,w22,w8 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w10,w10,w7 add w21,w21,w9 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w24 add w21,w21,w27 // 
e+=rot(a,5) ror w23,w23,#2 eor w11,w11,w8 add w20,w20,w10 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w11,w11,#31 movz w28,#0xbcdc movk w28,#0x8f1b,lsl#16 eor w12,w12,w14 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w12,w12,w9 add w24,w24,w11 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w12,w12,#31 orr w25,w21,w22 and w26,w21,w22 eor w13,w13,w15 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w13,w13,w5 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w13,w13,w10 add w23,w23,w12 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w13,w13,#31 orr w25,w20,w21 and w26,w20,w21 eor w14,w14,w16 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w14,w14,w6 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w14,w14,w11 add w22,w22,w13 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w14,w14,#31 orr w25,w24,w20 and w26,w24,w20 eor w15,w15,w17 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w15,w15,w7 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w15,w15,w12 add w21,w21,w14 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w15,w15,#31 orr w25,w23,w24 and w26,w23,w24 eor w16,w16,w19 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w16,w16,w8 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w16,w16,w13 add w20,w20,w15 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w16,w16,#31 orr w25,w22,w23 and w26,w22,w23 eor w17,w17,w3 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w17,w17,w9 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w17,w17,w14 add w24,w24,w16 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w17,w17,#31 orr w25,w21,w22 and w26,w21,w22 eor w19,w19,w4 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w19,w19,w10 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w19,w19,w15 add w23,w23,w17 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w19,w19,#31 orr w25,w20,w21 and w26,w20,w21 eor w3,w3,w5 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w3,w3,w11 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w3,w3,w16 add w22,w22,w19 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w3,w3,#31 orr w25,w24,w20 and w26,w24,w20 eor w4,w4,w6 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w4,w4,w12 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w4,w4,w17 add w21,w21,w3 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w4,w4,#31 orr w25,w23,w24 and w26,w23,w24 eor w5,w5,w7 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w5,w5,w13 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w5,w5,w19 add w20,w20,w4 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w5,w5,#31 orr w25,w22,w23 and w26,w22,w23 eor w6,w6,w8 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w6,w6,w14 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w6,w6,w3 add w24,w24,w5 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w6,w6,#31 orr w25,w21,w22 and w26,w21,w22 eor w7,w7,w9 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w7,w7,w15 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w7,w7,w4 add w23,w23,w6 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w7,w7,#31 orr w25,w20,w21 and w26,w20,w21 eor w8,w8,w10 ror 
w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w8,w8,w16 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w8,w8,w5 add w22,w22,w7 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w8,w8,#31 orr w25,w24,w20 and w26,w24,w20 eor w9,w9,w11 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w9,w9,w17 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w9,w9,w6 add w21,w21,w8 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w9,w9,#31 orr w25,w23,w24 and w26,w23,w24 eor w10,w10,w12 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w10,w10,w19 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w10,w10,w7 add w20,w20,w9 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w10,w10,#31 orr w25,w22,w23 and w26,w22,w23 eor w11,w11,w13 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w11,w11,w3 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w11,w11,w8 add w24,w24,w10 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w11,w11,#31 orr w25,w21,w22 and w26,w21,w22 eor w12,w12,w14 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w12,w12,w4 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w12,w12,w9 add w23,w23,w11 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w12,w12,#31 orr w25,w20,w21 and w26,w20,w21 eor w13,w13,w15 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w13,w13,w5 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w13,w13,w10 add w22,w22,w12 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w13,w13,#31 orr w25,w24,w20 and w26,w24,w20 eor w14,w14,w16 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w14,w14,w6 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w14,w14,w11 add w21,w21,w13 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w14,w14,#31 orr w25,w23,w24 and w26,w23,w24 eor w15,w15,w17 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w15,w15,w7 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w15,w15,w12 add w20,w20,w14 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w15,w15,#31 movz w28,#0xc1d6 movk w28,#0xca62,lsl#16 orr w25,w22,w23 and w26,w22,w23 eor w16,w16,w19 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w16,w16,w8 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w16,w16,w13 add w24,w24,w15 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w17,w17,w14 add w23,w23,w16 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w19,w19,w15 add w22,w22,w17 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w19,w19,#31 eor w3,w3,w5 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w3,w3,w11 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w3,w3,w16 add w21,w21,w19 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w4,w4,w12 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w4,w4,w17 add w20,w20,w3 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 eor 
w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w5,w5,w13 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w5,w5,w19 add w24,w24,w4 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w6,w6,w14 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w6,w6,w3 add w23,w23,w5 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w7,w7,w15 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w7,w7,w4 add w22,w22,w6 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w7,w7,#31 eor w8,w8,w10 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w8,w8,w16 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w8,w8,w5 add w21,w21,w7 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w9,w9,w6 add w20,w20,w8 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w10,w10,w7 add w24,w24,w9 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w11,w11,w8 add w23,w23,w10 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w11,w11,#31 eor w12,w12,w14 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w12,w12,w9 add w22,w22,w11 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w12,w12,#31 eor w13,w13,w15 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w13,w13,w5 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w13,w13,w10 add w21,w21,w12 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w13,w13,#31 eor w14,w14,w16 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w14,w14,w6 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w14,w14,w11 add w20,w20,w13 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w14,w14,#31 eor w15,w15,w17 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w15,w15,w7 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w15,w15,w12 add w24,w24,w14 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w15,w15,#31 eor w16,w16,w19 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w16,w16,w8 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w16,w16,w13 add w23,w23,w15 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w17,w17,w14 add w22,w22,w16 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w19,w19,w15 add w21,w21,w17 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w19,w19,#31 ldp w4,w5,[x0] eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K 
eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w19 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ldp w6,w7,[x0,#8] eor w25,w24,w22 ror w27,w21,#27 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 ldr w8,[x0,#16] add w20,w20,w25 // e+=F(b,c,d) add w21,w21,w5 add w22,w22,w6 add w20,w20,w4 add w23,w23,w7 add w24,w24,w8 stp w20,w21,[x0] stp w22,w23,[x0,#8] str w24,[x0,#16] cbnz x2,.Loop ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldp x25,x26,[sp,#64] ldp x27,x28,[sp,#80] ldr x29,[sp],#96 ret .size sha1_block_data_order_nohw,.-sha1_block_data_order_nohw .globl sha1_block_data_order_hw .hidden sha1_block_data_order_hw .type sha1_block_data_order_hw,%function .align 6 sha1_block_data_order_hw: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 adrp x4,.Lconst add x4,x4,:lo12:.Lconst eor v1.16b,v1.16b,v1.16b ld1 {v0.4s},[x0],#16 ld1 {v1.s}[0],[x0] sub x0,x0,#16 ld1 {v16.4s,v17.4s,v18.4s,v19.4s},[x4] .Loop_hw: ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 sub x2,x2,#1 rev32 v4.16b,v4.16b rev32 v5.16b,v5.16b add v20.4s,v16.4s,v4.4s rev32 v6.16b,v6.16b orr v22.16b,v0.16b,v0.16b // offload add v21.4s,v16.4s,v5.4s rev32 v7.16b,v7.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b .inst 0x5e140020 //sha1c v0.16b,v1.16b,v20.4s // 0 add v20.4s,v16.4s,v6.4s .inst 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 1 .inst 0x5e150060 //sha1c v0.16b,v3.16b,v21.4s add v21.4s,v16.4s,v7.4s .inst 0x5e2818e4 //sha1su1 v4.16b,v7.16b .inst 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 2 .inst 0x5e140040 //sha1c v0.16b,v2.16b,v20.4s add v20.4s,v16.4s,v4.4s .inst 0x5e281885 //sha1su1 v5.16b,v4.16b .inst 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 3 .inst 0x5e150060 //sha1c v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v5.4s .inst 0x5e2818a6 //sha1su1 v6.16b,v5.16b .inst 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 4 .inst 0x5e140040 //sha1c v0.16b,v2.16b,v20.4s add v20.4s,v17.4s,v6.4s .inst 0x5e2818c7 //sha1su1 v7.16b,v6.16b .inst 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 5 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v7.4s .inst 0x5e2818e4 //sha1su1 v4.16b,v7.16b .inst 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 6 .inst 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v17.4s,v4.4s .inst 0x5e281885 //sha1su1 v5.16b,v4.16b .inst 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 7 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v5.4s .inst 0x5e2818a6 //sha1su1 v6.16b,v5.16b .inst 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 8 .inst 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v6.4s .inst 0x5e2818c7 //sha1su1 v7.16b,v6.16b .inst 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 9 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v18.4s,v7.4s .inst 0x5e2818e4 //sha1su1 v4.16b,v7.16b .inst 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 10 .inst 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v4.4s .inst 0x5e281885 //sha1su1 v5.16b,v4.16b .inst 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 11 .inst 0x5e152060 //sha1m 
v0.16b,v3.16b,v21.4s add v21.4s,v18.4s,v5.4s .inst 0x5e2818a6 //sha1su1 v6.16b,v5.16b .inst 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 12 .inst 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v6.4s .inst 0x5e2818c7 //sha1su1 v7.16b,v6.16b .inst 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 13 .inst 0x5e152060 //sha1m v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v7.4s .inst 0x5e2818e4 //sha1su1 v4.16b,v7.16b .inst 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 14 .inst 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v19.4s,v4.4s .inst 0x5e281885 //sha1su1 v5.16b,v4.16b .inst 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 15 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v5.4s .inst 0x5e2818a6 //sha1su1 v6.16b,v5.16b .inst 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .inst 0x5e280803 //sha1h v3.16b,v0.16b // 16 .inst 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v19.4s,v6.4s .inst 0x5e2818c7 //sha1su1 v7.16b,v6.16b .inst 0x5e280802 //sha1h v2.16b,v0.16b // 17 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v7.4s .inst 0x5e280803 //sha1h v3.16b,v0.16b // 18 .inst 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s .inst 0x5e280802 //sha1h v2.16b,v0.16b // 19 .inst 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v1.4s,v1.4s,v2.4s add v0.4s,v0.4s,v22.4s cbnz x2,.Loop_hw st1 {v0.4s},[x0],#16 st1 {v1.s}[0],[x0] ldr x29,[sp],#16 ret .size sha1_block_data_order_hw,.-sha1_block_data_order_hw .section .rodata .align 6 .Lconst: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 //K_00_19 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 //K_20_39 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc //K_40_59 .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 //K_60_79 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
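The scalar sha1_block_data_order_nohw path above is the standard 80-round SHA-1 compression, fully unrolled: each instruction group computes e += rotl(a,5) + F(b,c,d) + K + W[t] (its comments mark these as "e+=rot(a,5)", "e+=F(b,c,d)", "future e+=K", "future e+=X[i]"), rotates b by 30 (ror #2), and extends the message schedule with the eor/ror #31 chain. A minimal scalar C sketch of one round, using the same four K constants that the movz/movk pairs build and that .Lconst lists:

#include <stdint.h>

static uint32_t rotl32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

static const uint32_t K[4] = {0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6};

/* Round-dependent mixing function; matches the bic/and, eor/eor and orr/and patterns above. */
static uint32_t F(int t, uint32_t b, uint32_t c, uint32_t d) {
  if (t < 20) return (b & c) | (~b & d);            /* Ch     */
  if (t < 40) return b ^ c ^ d;                     /* Parity */
  if (t < 60) return (b & c) | (b & d) | (c & d);   /* Maj    */
  return b ^ c ^ d;                                 /* Parity */
}

/* One round on the state s = {a, b, c, d, e}; the assembly interleaves this
 * with the W[t] = rotl(W[t-3]^W[t-8]^W[t-14]^W[t-16], 1) schedule update. */
static void sha1_round(uint32_t s[5], int t, uint32_t w) {
  uint32_t tmp = rotl32(s[0], 5) + F(t, s[1], s[2], s[3]) + s[4] + K[t / 20] + w;
  s[4] = s[3];
  s[3] = s[2];
  s[2] = rotl32(s[1], 30);   /* ror #2 of a 32-bit word */
  s[1] = s[0];
  s[0] = tmp;
}

The sha1_block_data_order_hw variant reaches the same result through the sha1c/sha1p/sha1m instructions, each of which folds F and the pre-added K vector into four rounds at a time.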
marvin-hansen/iggy-streaming-system
80,233
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/aesv8-gcm-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__ >= 8 .arch armv8-a+crypto .text .globl aes_gcm_enc_kernel .hidden aes_gcm_enc_kernel .type aes_gcm_enc_kernel,%function .align 4 aes_gcm_enc_kernel: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,BORINGSSL_function_hit add x9, x9, :lo12:BORINGSSL_function_hit mov w10, #1 strb w10, [x9,#2] // kFlag_aes_gcm_enc_kernel #endif AARCH64_SIGN_LINK_REGISTER stp x29, x30, [sp, #-128]! mov x29, sp stp x19, x20, [sp, #16] mov x16, x4 mov x8, x5 stp x21, x22, [sp, #32] stp x23, x24, [sp, #48] stp d8, d9, [sp, #64] stp d10, d11, [sp, #80] stp d12, d13, [sp, #96] stp d14, d15, [sp, #112] ldr w17, [x8, #240] add x19, x8, x17, lsl #4 // borrow input_l1 for last key ldp x13, x14, [x19] // load round N keys ldr q31, [x19, #-16] // load round N-1 keys add x4, x0, x1, lsr #3 // end_input_ptr lsr x5, x1, #3 // byte_len mov x15, x5 ldp x10, x11, [x16] // ctr96_b64, ctr96_t32 ld1 { v0.16b}, [x16] // special case vector load initial counter so we can start first AES block as quickly as possible sub x5, x5, #1 // byte_len - 1 ldr q18, [x8, #0] // load rk0 and x5, x5, #0xffffffffffffffc0 // number of bytes to be processed in main loop (at least 1 byte must be handled by tail) ldr q25, [x8, #112] // load rk7 add x5, x5, x0 lsr x12, x11, #32 fmov d2, x10 // CTR block 2 orr w11, w11, w11 rev w12, w12 // rev_ctr32 fmov d1, x10 // CTR block 1 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 0 - round 0 add w12, w12, #1 // increment rev_ctr32 rev w9, w12 // CTR block 1 fmov d3, x10 // CTR block 3 orr x9, x11, x9, lsl #32 // CTR block 1 add w12, w12, #1 // CTR block 1 ldr q19, [x8, #16] // load rk1 fmov v1.d[1], x9 // CTR block 1 rev w9, w12 // CTR block 2 add w12, w12, #1 // CTR block 2 orr x9, x11, x9, lsl #32 // CTR block 2 ldr q20, [x8, #32] // load rk2 fmov v2.d[1], x9 // CTR block 2 rev w9, w12 // CTR block 3 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 0 - round 1 orr x9, x11, x9, lsl #32 // CTR block 3 fmov v3.d[1], x9 // CTR block 3 aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 1 - round 0 ldr q21, [x8, #48] // load rk3 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 0 - round 2 ldr q24, [x8, #96] // load rk6 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 2 - round 0 ldr q23, [x8, #80] // load rk5 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 1 - round 1 ldr q14, [x6, #48] // load h3l | h3h aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 3 - round 0 aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 2 - round 1 ldr q22, [x8, #64] // load rk4 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 1 - round 2 ldr q13, [x6, #32] // load h2l | h2h aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 3 - round 1 ldr q30, [x8, #192] // load rk12 aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 2 - round 2 ldr q15, [x6, #80] // load h4l | h4h aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 1 - round 3 ldr q29, [x8, #176] // load rk11 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 3 - round 2 ldr q26, [x8, #128] // load rk8 aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 2 - round 3 add w12, w12, #1 // CTR block 3 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 0 - round 3 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 3 - round 3 ld1 { v11.16b}, [x3] ext 
v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 2 - round 4 aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 0 - round 4 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 1 - round 4 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 3 - round 4 cmp x17, #12 // setup flags for AES-128/192/256 check aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 0 - round 5 aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 1 - round 5 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 3 - round 5 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 2 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 1 - round 6 trn2 v17.2d, v14.2d, v15.2d // h4l | h3l aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 3 - round 6 ldr q27, [x8, #144] // load rk9 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 0 - round 6 ldr q12, [x6] // load h1l | h1h aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 2 - round 6 ldr q28, [x8, #160] // load rk10 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 1 - round 7 trn1 v9.2d, v14.2d, v15.2d // h4h | h3h aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 0 - round 7 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 2 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 3 - round 7 trn2 v16.2d, v12.2d, v13.2d // h2l | h1l aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 1 - round 8 aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 2 - round 8 aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 3 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 0 - round 8 b.lt .Lenc_finish_first_blocks // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 1 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 2 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 3 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 0 - round 9 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 1 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 2 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 3 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 0 - round 10 b.eq .Lenc_finish_first_blocks // branch if AES-192 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 1 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 2 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 0 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 3 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 1 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 2 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 0 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 3 - round 12 .Lenc_finish_first_blocks: cmp x0, x5 // check if we have <= 4 blocks eor v17.16b, v17.16b, v9.16b // h4k | h3k aese v2.16b, v31.16b // AES block 2 - round N-1 trn1 v8.2d, v12.2d, v13.2d // h2h | h1h aese v1.16b, v31.16b // AES block 1 - round N-1 aese v0.16b, v31.16b // AES block 0 - round N-1 aese v3.16b, v31.16b // AES block 3 - round N-1 eor v16.16b, v16.16b, v8.16b // h2k | h1k b.ge .Lenc_tail // handle tail ldp x19, x20, [x0, #16] // AES block 1 - load plaintext rev w9, w12 // CTR block 4 ldp x6, x7, [x0, #0] // AES block 0 - load plaintext ldp x23, x24, [x0, #48] // AES block 3 - load plaintext ldp x21, x22, [x0, #32] // AES block 2 - load plaintext add x0, x0, #64 // AES input_ptr 
update eor x19, x19, x13 // AES block 1 - round N low eor x20, x20, x14 // AES block 1 - round N high fmov d5, x19 // AES block 1 - mov low eor x6, x6, x13 // AES block 0 - round N low eor x7, x7, x14 // AES block 0 - round N high eor x24, x24, x14 // AES block 3 - round N high fmov d4, x6 // AES block 0 - mov low cmp x0, x5 // check if we have <= 8 blocks fmov v4.d[1], x7 // AES block 0 - mov high eor x23, x23, x13 // AES block 3 - round N low eor x21, x21, x13 // AES block 2 - round N low fmov v5.d[1], x20 // AES block 1 - mov high fmov d6, x21 // AES block 2 - mov low add w12, w12, #1 // CTR block 4 orr x9, x11, x9, lsl #32 // CTR block 4 fmov d7, x23 // AES block 3 - mov low eor x22, x22, x14 // AES block 2 - round N high fmov v6.d[1], x22 // AES block 2 - mov high eor v4.16b, v4.16b, v0.16b // AES block 0 - result fmov d0, x10 // CTR block 4 fmov v0.d[1], x9 // CTR block 4 rev w9, w12 // CTR block 5 add w12, w12, #1 // CTR block 5 eor v5.16b, v5.16b, v1.16b // AES block 1 - result fmov d1, x10 // CTR block 5 orr x9, x11, x9, lsl #32 // CTR block 5 fmov v1.d[1], x9 // CTR block 5 rev w9, w12 // CTR block 6 st1 { v4.16b}, [x2], #16 // AES block 0 - store result fmov v7.d[1], x24 // AES block 3 - mov high orr x9, x11, x9, lsl #32 // CTR block 6 eor v6.16b, v6.16b, v2.16b // AES block 2 - result st1 { v5.16b}, [x2], #16 // AES block 1 - store result add w12, w12, #1 // CTR block 6 fmov d2, x10 // CTR block 6 fmov v2.d[1], x9 // CTR block 6 st1 { v6.16b}, [x2], #16 // AES block 2 - store result rev w9, w12 // CTR block 7 orr x9, x11, x9, lsl #32 // CTR block 7 eor v7.16b, v7.16b, v3.16b // AES block 3 - result st1 { v7.16b}, [x2], #16 // AES block 3 - store result b.ge .Lenc_prepretail // do prepretail .Lenc_main_loop: // main loop start aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 rev64 v4.16b, v4.16b // GHASH block 4k (only t0 is free) aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d3, x10 // CTR block 4k+3 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 fmov v3.d[1], x9 // CTR block 4k+3 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 ldp x23, x24, [x0, #48] // AES block 4k+7 - load plaintext aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 ldp x21, x22, [x0, #32] // AES block 4k+6 - load plaintext aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 eor v4.16b, v4.16b, v11.16b // PRE 1 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 eor x23, x23, x13 // AES block 4k+7 - round N low aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 mov d10, v17.d[1] // GHASH block 4k - mid pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high eor x22, x22, x14 // AES block 4k+6 - round N high mov d8, v4.d[1] // GHASH block 4k - mid aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 rev64 v5.16b, v5.16b // GHASH block 4k+1 (t0 and t1 free) aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 rev64 v7.16b, v7.16b // GHASH block 4k+3 (t0, t1, t2 and t3 free) pmull2 v4.1q, v5.2d, 
v14.2d // GHASH block 4k+1 - high pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid rev64 v6.16b, v6.16b // GHASH block 4k+2 (t0, t1, and t2 free) pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 pmull v6.1q, v7.1d, v12.1d // GHASH block 4k+3 - low eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 ldp x19, x20, [x0, #16] // AES block 4k+5 - load plaintext aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 mov d4, v7.d[1] // GHASH block 4k+3 - mid aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high eor v4.8b, v4.8b, v7.8b // GHASH block 4k+3 - mid aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor x19, x19, x13 // AES block 4k+5 - round N low aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 eor x21, x21, x13 // AES block 4k+6 - round N low aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 movi v8.8b, #0xc2 pmull v4.1q, v4.1d, v16.1d // GHASH block 4k+3 - mid eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high cmp x17, #12 // setup flags for AES-128/192/256 check fmov d5, x19 // AES block 4k+5 - mov low ldp x6, x7, [x0, #0] // AES block 4k+4 - load plaintext b.lt .Lenc_main_loop_continue // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 
aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 b.eq .Lenc_main_loop_continue // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 .Lenc_main_loop_continue: shl d8, d8, #56 // mod_constant eor v11.16b, v11.16b, v6.16b // GHASH block 4k+3 - low eor v10.16b, v10.16b, v4.16b // GHASH block 4k+3 - mid add w12, w12, #1 // CTR block 4k+3 eor v4.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up add x0, x0, #64 // AES input_ptr update pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid rev w9, w12 // CTR block 4k+8 ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor x6, x6, x13 // AES block 4k+4 - round N low eor v10.16b, v10.16b, v4.16b // MODULO - karatsuba tidy up eor x7, x7, x14 // AES block 4k+4 - round N high fmov d4, x6 // AES block 4k+4 - mov low orr x9, x11, x9, lsl #32 // CTR block 4k+8 eor v7.16b, v9.16b, v7.16b // MODULO - fold into mid eor x20, x20, x14 // AES block 4k+5 - round N high eor x24, x24, x14 // AES block 4k+7 - round N high add w12, w12, #1 // CTR block 4k+8 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 fmov v4.d[1], x7 // AES block 4k+4 - mov high eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid fmov d7, x23 // AES block 4k+7 - mov low aese v1.16b, v31.16b // AES block 4k+5 - round N-1 fmov v5.d[1], x20 // AES block 4k+5 - mov high fmov d6, x21 // AES block 4k+6 - mov low cmp x0, x5 // .LOOP CONTROL fmov v6.d[1], x22 // AES block 4k+6 - mov high pmull v9.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor v4.16b, v4.16b, v0.16b // AES block 4k+4 - result fmov d0, x10 // CTR block 4k+8 fmov v0.d[1], x9 // CTR block 4k+8 rev w9, w12 // CTR block 4k+9 add w12, w12, #1 // CTR block 4k+9 eor v5.16b, v5.16b, v1.16b // AES block 4k+5 - result fmov d1, x10 // CTR block 4k+9 orr x9, x11, x9, lsl #32 // CTR block 4k+9 fmov v1.d[1], x9 // CTR block 4k+9 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 rev w9, w12 // CTR block 4k+10 st1 { v4.16b}, [x2], #16 // AES block 4k+4 - store result orr x9, x11, x9, lsl #32 // CTR block 4k+10 eor v11.16b, v11.16b, v9.16b // MODULO - fold into low fmov v7.d[1], x24 // AES block 4k+7 - mov high ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment st1 { v5.16b}, [x2], #16 // AES block 4k+5 - store result add w12, w12, #1 // CTR block 4k+10 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 eor v6.16b, v6.16b, v2.16b // AES block 4k+6 - result fmov d2, x10 // CTR block 4k+10 st1 { v6.16b}, [x2], #16 // AES block 4k+6 - store result fmov v2.d[1], x9 // CTR block 4k+10 rev w9, w12 // CTR block 4k+11 eor v11.16b, v11.16b, v10.16b // MODULO - fold into low orr x9, x11, x9, lsl #32 // CTR block 4k+11 eor v7.16b, v7.16b, v3.16b // AES block 4k+7 - result st1 { v7.16b}, [x2], #16 // AES block 4k+7 - store result b.lt .Lenc_main_loop .Lenc_prepretail: // PREPRETAIL aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES 
block 4k+5 - round 0 rev64 v6.16b, v6.16b // GHASH block 4k+2 (t0, t1, and t2 free) aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 fmov d3, x10 // CTR block 4k+3 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 rev64 v4.16b, v4.16b // GHASH block 4k (only t0 is free) fmov v3.d[1], x9 // CTR block 4k+3 ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 eor v4.16b, v4.16b, v11.16b // PRE 1 rev64 v5.16b, v5.16b // GHASH block 4k+1 (t0 and t1 free) aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 mov d10, v17.d[1] // GHASH block 4k - mid aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low mov d8, v4.d[1] // GHASH block 4k - mid pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 rev64 v7.16b, v7.16b // GHASH block 4k+3 (t0, t1, t2 and t3 free) aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid add w12, w12, #1 // CTR block 4k+3 pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high mov d4, v7.d[1] // GHASH block 4k+3 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid eor v4.8b, v4.8b, v7.8b // GHASH block 4k+3 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 pmull v4.1q, v4.1d, v16.1d // GHASH block 4k+3 - mid eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 
- round 6 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 movi v8.8b, #0xc2 aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 shl d8, d8, #56 // mod_constant aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+3 - mid pmull v6.1q, v7.1d, v12.1d // GHASH block 4k+3 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 cmp x17, #12 // setup flags for AES-128/192/256 check aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 eor v11.16b, v11.16b, v6.16b // GHASH block 4k+3 - low aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v10.16b, v10.16b, v9.16b // karatsuba tidy up aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 pmull v4.1q, v9.1d, v8.1d ext v9.16b, v9.16b, v9.16b, #8 eor v10.16b, v10.16b, v11.16b b.lt .Lenc_finish_prepretail // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 b.eq .Lenc_finish_prepretail // branch if AES-192 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 .Lenc_finish_prepretail: eor v10.16b, v10.16b, v4.16b eor v10.16b, v10.16b, v9.16b pmull v4.1q, v10.1d, v8.1d ext v10.16b, v10.16b, v10.16b, #8 aese v1.16b, v31.16b // AES block 4k+5 - round N-1 eor v11.16b, v11.16b, v4.16b aese v3.16b, v31.16b // AES block 4k+7 - round N-1 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 eor v11.16b, v11.16b, v10.16b .Lenc_tail: // TAIL ext v8.16b, v11.16b, v11.16b, #8 // prepare final partial tag sub x5, x4, x0 // main_end_input_ptr is number of bytes left to process ldp x6, x7, [x0], #16 // AES block 4k+4 - load plaintext eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high cmp x5, #48 fmov d4, x6 // AES block 4k+4 - mov low fmov v4.d[1], x7 // AES block 4k+4 - mov high eor v5.16b, v4.16b, v0.16b // AES block 4k+4 - result b.gt .Lenc_blocks_4_remaining cmp x5, #32 mov v3.16b, v2.16b movi v11.8b, #0 movi v9.8b, #0 sub w12, w12, #1 mov v2.16b, v1.16b movi v10.8b, #0 b.gt .Lenc_blocks_3_remaining mov v3.16b, v1.16b sub w12, w12, #1 cmp x5, #16 b.gt .Lenc_blocks_2_remaining sub w12, w12, #1 b 
.Lenc_blocks_1_remaining .Lenc_blocks_4_remaining: // blocks left = 4 st1 { v5.16b}, [x2], #16 // AES final-3 block - store result ldp x6, x7, [x0], #16 // AES final-2 block - load input low & high rev64 v4.16b, v5.16b // GHASH final-3 block eor x6, x6, x13 // AES final-2 block - round N low eor v4.16b, v4.16b, v8.16b // feed in partial tag eor x7, x7, x14 // AES final-2 block - round N high mov d22, v4.d[1] // GHASH final-3 block - mid fmov d5, x6 // AES final-2 block - mov low fmov v5.d[1], x7 // AES final-2 block - mov high eor v22.8b, v22.8b, v4.8b // GHASH final-3 block - mid movi v8.8b, #0 // suppress further partial tag feed in mov d10, v17.d[1] // GHASH final-3 block - mid pmull v11.1q, v4.1d, v15.1d // GHASH final-3 block - low pmull2 v9.1q, v4.2d, v15.2d // GHASH final-3 block - high pmull v10.1q, v22.1d, v10.1d // GHASH final-3 block - mid eor v5.16b, v5.16b, v1.16b // AES final-2 block - result .Lenc_blocks_3_remaining: // blocks left = 3 st1 { v5.16b}, [x2], #16 // AES final-2 block - store result ldp x6, x7, [x0], #16 // AES final-1 block - load input low & high rev64 v4.16b, v5.16b // GHASH final-2 block eor x6, x6, x13 // AES final-1 block - round N low eor v4.16b, v4.16b, v8.16b // feed in partial tag fmov d5, x6 // AES final-1 block - mov low eor x7, x7, x14 // AES final-1 block - round N high fmov v5.d[1], x7 // AES final-1 block - mov high movi v8.8b, #0 // suppress further partial tag feed in pmull2 v20.1q, v4.2d, v14.2d // GHASH final-2 block - high mov d22, v4.d[1] // GHASH final-2 block - mid pmull v21.1q, v4.1d, v14.1d // GHASH final-2 block - low eor v22.8b, v22.8b, v4.8b // GHASH final-2 block - mid eor v5.16b, v5.16b, v2.16b // AES final-1 block - result eor v9.16b, v9.16b, v20.16b // GHASH final-2 block - high pmull v22.1q, v22.1d, v17.1d // GHASH final-2 block - mid eor v11.16b, v11.16b, v21.16b // GHASH final-2 block - low eor v10.16b, v10.16b, v22.16b // GHASH final-2 block - mid .Lenc_blocks_2_remaining: // blocks left = 2 st1 { v5.16b}, [x2], #16 // AES final-1 block - store result rev64 v4.16b, v5.16b // GHASH final-1 block ldp x6, x7, [x0], #16 // AES final block - load input low & high eor v4.16b, v4.16b, v8.16b // feed in partial tag movi v8.8b, #0 // suppress further partial tag feed in eor x6, x6, x13 // AES final block - round N low mov d22, v4.d[1] // GHASH final-1 block - mid pmull2 v20.1q, v4.2d, v13.2d // GHASH final-1 block - high eor x7, x7, x14 // AES final block - round N high eor v22.8b, v22.8b, v4.8b // GHASH final-1 block - mid eor v9.16b, v9.16b, v20.16b // GHASH final-1 block - high ins v22.d[1], v22.d[0] // GHASH final-1 block - mid fmov d5, x6 // AES final block - mov low fmov v5.d[1], x7 // AES final block - mov high pmull2 v22.1q, v22.2d, v16.2d // GHASH final-1 block - mid pmull v21.1q, v4.1d, v13.1d // GHASH final-1 block - low eor v5.16b, v5.16b, v3.16b // AES final block - result eor v10.16b, v10.16b, v22.16b // GHASH final-1 block - mid eor v11.16b, v11.16b, v21.16b // GHASH final-1 block - low .Lenc_blocks_1_remaining: // blocks_left = 1 rev64 v4.16b, v5.16b // GHASH final block eor v4.16b, v4.16b, v8.16b // feed in partial tag pmull2 v20.1q, v4.2d, v12.2d // GHASH final block - high mov d8, v4.d[1] // GHASH final block - mid rev w9, w12 pmull v21.1q, v4.1d, v12.1d // GHASH final block - low eor v9.16b, v9.16b, v20.16b // GHASH final block - high eor v8.8b, v8.8b, v4.8b // GHASH final block - mid pmull v8.1q, v8.1d, v16.1d // GHASH final block - mid eor v11.16b, v11.16b, v21.16b // GHASH final block - low eor v10.16b, v10.16b, 
v8.16b // GHASH final block - mid movi v8.8b, #0xc2 eor v4.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up shl d8, d8, #56 // mod_constant eor v10.16b, v10.16b, v4.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid pmull v9.1q, v10.1d, v8.1d // MODULO - mid 64b align with low ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment str w9, [x16, #12] // store the updated counter st1 { v5.16b}, [x2] // store all 16B eor v11.16b, v11.16b, v9.16b // MODULO - fold into low eor v11.16b, v11.16b, v10.16b // MODULO - fold into low ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b mov x0, x15 st1 { v11.16b }, [x3] ldp x19, x20, [sp, #16] ldp x21, x22, [sp, #32] ldp x23, x24, [sp, #48] ldp d8, d9, [sp, #64] ldp d10, d11, [sp, #80] ldp d12, d13, [sp, #96] ldp d14, d15, [sp, #112] ldp x29, x30, [sp], #128 AARCH64_VALIDATE_LINK_REGISTER ret .size aes_gcm_enc_kernel,.-aes_gcm_enc_kernel .globl aes_gcm_dec_kernel .hidden aes_gcm_dec_kernel .type aes_gcm_dec_kernel,%function .align 4 aes_gcm_dec_kernel: AARCH64_SIGN_LINK_REGISTER stp x29, x30, [sp, #-128]! mov x29, sp stp x19, x20, [sp, #16] mov x16, x4 mov x8, x5 stp x21, x22, [sp, #32] stp x23, x24, [sp, #48] stp d8, d9, [sp, #64] stp d10, d11, [sp, #80] stp d12, d13, [sp, #96] stp d14, d15, [sp, #112] ldr w17, [x8, #240] add x19, x8, x17, lsl #4 // borrow input_l1 for last key ldp x13, x14, [x19] // load round N keys ldr q31, [x19, #-16] // load round N-1 keys lsr x5, x1, #3 // byte_len mov x15, x5 ldp x10, x11, [x16] // ctr96_b64, ctr96_t32 ldr q26, [x8, #128] // load rk8 sub x5, x5, #1 // byte_len - 1 ldr q25, [x8, #112] // load rk7 and x5, x5, #0xffffffffffffffc0 // number of bytes to be processed in main loop (at least 1 byte must be handled by tail) add x4, x0, x1, lsr #3 // end_input_ptr ldr q24, [x8, #96] // load rk6 lsr x12, x11, #32 ldr q23, [x8, #80] // load rk5 orr w11, w11, w11 ldr q21, [x8, #48] // load rk3 add x5, x5, x0 rev w12, w12 // rev_ctr32 add w12, w12, #1 // increment rev_ctr32 fmov d3, x10 // CTR block 3 rev w9, w12 // CTR block 1 add w12, w12, #1 // CTR block 1 fmov d1, x10 // CTR block 1 orr x9, x11, x9, lsl #32 // CTR block 1 ld1 { v0.16b}, [x16] // special case vector load initial counter so we can start first AES block as quickly as possible fmov v1.d[1], x9 // CTR block 1 rev w9, w12 // CTR block 2 add w12, w12, #1 // CTR block 2 fmov d2, x10 // CTR block 2 orr x9, x11, x9, lsl #32 // CTR block 2 fmov v2.d[1], x9 // CTR block 2 rev w9, w12 // CTR block 3 orr x9, x11, x9, lsl #32 // CTR block 3 ldr q18, [x8, #0] // load rk0 fmov v3.d[1], x9 // CTR block 3 add w12, w12, #1 // CTR block 3 ldr q22, [x8, #64] // load rk4 ldr q19, [x8, #16] // load rk1 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 0 - round 0 ldr q14, [x6, #48] // load h3l | h3h aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 3 - round 0 ldr q15, [x6, #80] // load h4l | h4h aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 1 - round 0 ldr q13, [x6, #32] // load h2l | h2h aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 2 - round 0 ldr q20, [x8, #32] // load rk2 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 0 - round 1 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 1 - round 1 ld1 { v11.16b}, [x3] ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b aese v2.16b, v19.16b aesmc v2.16b, 
v2.16b // AES block 2 - round 1 ldr q27, [x8, #144] // load rk9 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 3 - round 1 ldr q30, [x8, #192] // load rk12 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 0 - round 2 ldr q12, [x6] // load h1l | h1h aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 2 - round 2 ldr q28, [x8, #160] // load rk10 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 3 - round 2 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 0 - round 3 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 1 - round 2 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 3 - round 3 aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 0 - round 4 aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 2 - round 3 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 1 - round 3 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 3 - round 4 aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 2 - round 4 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 1 - round 4 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 3 - round 5 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 0 - round 5 aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 1 - round 5 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 2 - round 5 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 0 - round 6 aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 3 - round 6 cmp x17, #12 // setup flags for AES-128/192/256 check aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 1 - round 6 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 2 - round 6 aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 0 - round 7 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 1 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 3 - round 7 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 0 - round 8 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 2 - round 7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 3 - round 8 aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 1 - round 8 ldr q29, [x8, #176] // load rk11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 2 - round 8 b.lt .Ldec_finish_first_blocks // branch if AES-128 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 0 - round 9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 1 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 3 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 2 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 0 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 1 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 3 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 2 - round 10 b.eq .Ldec_finish_first_blocks // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 0 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 3 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 1 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 2 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 1 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 0 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 2 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 3 - round 12 .Ldec_finish_first_blocks: cmp x0, x5 // check if we have <= 4 blocks trn1 v9.2d, v14.2d, v15.2d // h4h | h3h trn2 v17.2d, v14.2d, v15.2d // h4l | h3l trn1 v8.2d, v12.2d, 
v13.2d // h2h | h1h trn2 v16.2d, v12.2d, v13.2d // h2l | h1l eor v17.16b, v17.16b, v9.16b // h4k | h3k aese v1.16b, v31.16b // AES block 1 - round N-1 aese v2.16b, v31.16b // AES block 2 - round N-1 eor v16.16b, v16.16b, v8.16b // h2k | h1k aese v3.16b, v31.16b // AES block 3 - round N-1 aese v0.16b, v31.16b // AES block 0 - round N-1 b.ge .Ldec_tail // handle tail ldr q4, [x0, #0] // AES block 0 - load ciphertext ldr q5, [x0, #16] // AES block 1 - load ciphertext rev w9, w12 // CTR block 4 eor v0.16b, v4.16b, v0.16b // AES block 0 - result eor v1.16b, v5.16b, v1.16b // AES block 1 - result rev64 v5.16b, v5.16b // GHASH block 1 ldr q7, [x0, #48] // AES block 3 - load ciphertext mov x7, v0.d[1] // AES block 0 - mov high mov x6, v0.d[0] // AES block 0 - mov low rev64 v4.16b, v4.16b // GHASH block 0 add w12, w12, #1 // CTR block 4 fmov d0, x10 // CTR block 4 orr x9, x11, x9, lsl #32 // CTR block 4 fmov v0.d[1], x9 // CTR block 4 rev w9, w12 // CTR block 5 add w12, w12, #1 // CTR block 5 mov x19, v1.d[0] // AES block 1 - mov low orr x9, x11, x9, lsl #32 // CTR block 5 mov x20, v1.d[1] // AES block 1 - mov high eor x7, x7, x14 // AES block 0 - round N high eor x6, x6, x13 // AES block 0 - round N low stp x6, x7, [x2], #16 // AES block 0 - store result fmov d1, x10 // CTR block 5 ldr q6, [x0, #32] // AES block 2 - load ciphertext add x0, x0, #64 // AES input_ptr update fmov v1.d[1], x9 // CTR block 5 rev w9, w12 // CTR block 6 add w12, w12, #1 // CTR block 6 eor x19, x19, x13 // AES block 1 - round N low orr x9, x11, x9, lsl #32 // CTR block 6 eor x20, x20, x14 // AES block 1 - round N high stp x19, x20, [x2], #16 // AES block 1 - store result eor v2.16b, v6.16b, v2.16b // AES block 2 - result cmp x0, x5 // check if we have <= 8 blocks b.ge .Ldec_prepretail // do prepretail .Ldec_main_loop: // main loop start mov x21, v2.d[0] // AES block 4k+2 - mov low ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 eor v3.16b, v7.16b, v3.16b // AES block 4k+3 - result aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 mov x22, v2.d[1] // AES block 4k+2 - mov high aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d2, x10 // CTR block 4k+6 fmov v2.d[1], x9 // CTR block 4k+6 eor v4.16b, v4.16b, v11.16b // PRE 1 rev w9, w12 // CTR block 4k+7 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 mov x24, v3.d[1] // AES block 4k+3 - mov high aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 mov x23, v3.d[0] // AES block 4k+3 - mov low pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high mov d8, v4.d[1] // GHASH block 4k - mid fmov d3, x10 // CTR block 4k+7 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 orr x9, x11, x9, lsl #32 // CTR block 4k+7 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 fmov v3.d[1], x9 // CTR block 4k+7 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 eor x22, x22, x14 // AES block 4k+2 - round N high aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 mov d10, v17.d[1] // GHASH block 4k - mid aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 rev64 v6.16b, v6.16b // GHASH block 4k+2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 eor x21, x21, x13 // AES block 4k+2 - round N low aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 stp x21, x22, [x2], #16 // AES 
block 4k+2 - store result pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 rev64 v7.16b, v7.16b // GHASH block 4k+3 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid eor x23, x23, x13 // AES block 4k+3 - round N low pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low eor x24, x24, x14 // AES block 4k+3 - round N high eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 add w12, w12, #1 // CTR block 4k+7 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid rev w9, w12 // CTR block 4k+8 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 add w12, w12, #1 // CTR block 4k+8 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high mov d6, v7.d[1] // GHASH block 4k+3 - mid aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 pmull v4.1q, v7.1d, v12.1d // GHASH block 4k+3 - low orr x9, x11, x9, lsl #32 // CTR block 4k+8 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high cmp x17, #12 // setup flags for AES-128/192/256 check eor v6.8b, v6.8b, v7.8b // GHASH block 4k+3 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high pmull v6.1q, v6.1d, v16.1d // GHASH block 4k+3 - mid movi v8.8b, #0xc2 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v11.16b, v11.16b, v4.16b // GHASH block 4k+3 - low aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 shl d8, d8, #56 // mod_constant aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 eor v10.16b, v10.16b, v6.16b // GHASH block 4k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 b.lt .Ldec_main_loop_continue // branch if AES-128 aese 
v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 b.eq .Ldec_main_loop_continue // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 .Ldec_main_loop_continue: pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up ldr q4, [x0, #0] // AES block 4k+4 - load ciphertext aese v0.16b, v31.16b // AES block 4k+4 - round N-1 ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up ldr q5, [x0, #16] // AES block 4k+5 - load ciphertext eor v0.16b, v4.16b, v0.16b // AES block 4k+4 - result stp x23, x24, [x2], #16 // AES block 4k+3 - store result eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid ldr q7, [x0, #48] // AES block 4k+7 - load ciphertext ldr q6, [x0, #32] // AES block 4k+6 - load ciphertext mov x7, v0.d[1] // AES block 4k+4 - mov high eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid aese v1.16b, v31.16b // AES block 4k+5 - round N-1 add x0, x0, #64 // AES input_ptr update mov x6, v0.d[0] // AES block 4k+4 - mov low fmov d0, x10 // CTR block 4k+8 fmov v0.d[1], x9 // CTR block 4k+8 pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor v1.16b, v5.16b, v1.16b // AES block 4k+5 - result rev w9, w12 // CTR block 4k+9 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 orr x9, x11, x9, lsl #32 // CTR block 4k+9 cmp x0, x5 // .LOOP CONTROL add w12, w12, #1 // CTR block 4k+9 eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high mov x20, v1.d[1] // AES block 4k+5 - mov high eor v2.16b, v6.16b, v2.16b // AES block 4k+6 - result eor v11.16b, v11.16b, v8.16b // MODULO - fold into low mov x19, v1.d[0] // AES block 4k+5 - mov low fmov d1, x10 // CTR block 4k+9 ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment fmov v1.d[1], x9 // CTR block 4k+9 rev w9, w12 // CTR block 4k+10 add w12, w12, #1 // CTR block 4k+10 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 orr x9, x11, x9, lsl #32 // CTR block 4k+10 rev64 v5.16b, v5.16b // GHASH block 4k+5 eor x20, x20, x14 // AES block 4k+5 - round N high stp x6, x7, [x2], #16 // AES block 4k+4 - store result eor x19, x19, x13 // AES block 4k+5 - round N low stp x19, x20, [x2], #16 // AES block 4k+5 - store result rev64 v4.16b, v4.16b // GHASH block 4k+4 eor v11.16b, v11.16b, v10.16b // MODULO - fold into low b.lt .Ldec_main_loop .Ldec_prepretail: // PREPRETAIL ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 mov x21, v2.d[0] // AES block 4k+2 
- mov low eor v3.16b, v7.16b, v3.16b // AES block 4k+3 - result aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 mov x22, v2.d[1] // AES block 4k+2 - mov high aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d2, x10 // CTR block 4k+6 fmov v2.d[1], x9 // CTR block 4k+6 rev w9, w12 // CTR block 4k+7 eor v4.16b, v4.16b, v11.16b // PRE 1 rev64 v6.16b, v6.16b // GHASH block 4k+2 orr x9, x11, x9, lsl #32 // CTR block 4k+7 mov x23, v3.d[0] // AES block 4k+3 - mov low aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 mov x24, v3.d[1] // AES block 4k+3 - mov high pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low mov d8, v4.d[1] // GHASH block 4k - mid fmov d3, x10 // CTR block 4k+7 pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high fmov v3.d[1], x9 // CTR block 4k+7 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 mov d10, v17.d[1] // GHASH block 4k - mid aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 rev64 v7.16b, v7.16b // GHASH block 4k+3 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high pmull v4.1q, v7.1d, v12.1d // GHASH block 4k+3 - low aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 mov d6, v7.d[1] // GHASH block 4k+3 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 eor v6.8b, v6.8b, v7.8b // GHASH block 4k+3 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v3.16b, v24.16b aesmc v3.16b, 
v3.16b // AES block 4k+7 - round 6 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 movi v8.8b, #0xc2 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 eor v11.16b, v11.16b, v4.16b // GHASH block 4k+3 - low pmull v6.1q, v6.1d, v16.1d // GHASH block 4k+3 - mid aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 cmp x17, #12 // setup flags for AES-128/192/256 check eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 eor v10.16b, v10.16b, v6.16b // GHASH block 4k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 shl d8, d8, #56 // mod_constant aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 b.lt .Ldec_finish_prepretail // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 b.eq .Ldec_finish_prepretail // branch if AES-192 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 .Ldec_finish_prepretail: eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor x22, x22, x14 // AES block 4k+2 - round N high eor x23, x23, x13 // AES block 4k+3 - round N low eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid add w12, w12, #1 // CTR block 4k+7 eor x21, x21, x13 // AES block 4k+2 - round N low pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor x24, x24, x14 // AES block 4k+3 - round N high stp x21, x22, [x2], #16 // AES block 4k+2 - store result ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment stp x23, x24, [x2], #16 // AES block 4k+3 - store result eor v11.16b, v11.16b, v8.16b // MODULO - fold into low aese v1.16b, v31.16b // AES block 4k+5 - round N-1 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 eor v11.16b, v11.16b, 
v10.16b // MODULO - fold into low .Ldec_tail: // TAIL sub x5, x4, x0 // main_end_input_ptr is number of bytes left to process ld1 { v5.16b}, [x0], #16 // AES block 4k+4 - load ciphertext eor v0.16b, v5.16b, v0.16b // AES block 4k+4 - result mov x6, v0.d[0] // AES block 4k+4 - mov low mov x7, v0.d[1] // AES block 4k+4 - mov high ext v8.16b, v11.16b, v11.16b, #8 // prepare final partial tag cmp x5, #48 eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high b.gt .Ldec_blocks_4_remaining sub w12, w12, #1 mov v3.16b, v2.16b movi v10.8b, #0 movi v11.8b, #0 cmp x5, #32 movi v9.8b, #0 mov v2.16b, v1.16b b.gt .Ldec_blocks_3_remaining sub w12, w12, #1 mov v3.16b, v1.16b cmp x5, #16 b.gt .Ldec_blocks_2_remaining sub w12, w12, #1 b .Ldec_blocks_1_remaining .Ldec_blocks_4_remaining: // blocks left = 4 rev64 v4.16b, v5.16b // GHASH final-3 block ld1 { v5.16b}, [x0], #16 // AES final-2 block - load ciphertext stp x6, x7, [x2], #16 // AES final-3 block - store result mov d10, v17.d[1] // GHASH final-3 block - mid eor v4.16b, v4.16b, v8.16b // feed in partial tag eor v0.16b, v5.16b, v1.16b // AES final-2 block - result mov d22, v4.d[1] // GHASH final-3 block - mid mov x6, v0.d[0] // AES final-2 block - mov low mov x7, v0.d[1] // AES final-2 block - mov high eor v22.8b, v22.8b, v4.8b // GHASH final-3 block - mid movi v8.8b, #0 // suppress further partial tag feed in pmull2 v9.1q, v4.2d, v15.2d // GHASH final-3 block - high pmull v10.1q, v22.1d, v10.1d // GHASH final-3 block - mid eor x6, x6, x13 // AES final-2 block - round N low pmull v11.1q, v4.1d, v15.1d // GHASH final-3 block - low eor x7, x7, x14 // AES final-2 block - round N high .Ldec_blocks_3_remaining: // blocks left = 3 rev64 v4.16b, v5.16b // GHASH final-2 block ld1 { v5.16b}, [x0], #16 // AES final-1 block - load ciphertext eor v4.16b, v4.16b, v8.16b // feed in partial tag stp x6, x7, [x2], #16 // AES final-2 block - store result eor v0.16b, v5.16b, v2.16b // AES final-1 block - result mov d22, v4.d[1] // GHASH final-2 block - mid pmull v21.1q, v4.1d, v14.1d // GHASH final-2 block - low pmull2 v20.1q, v4.2d, v14.2d // GHASH final-2 block - high eor v22.8b, v22.8b, v4.8b // GHASH final-2 block - mid mov x6, v0.d[0] // AES final-1 block - mov low mov x7, v0.d[1] // AES final-1 block - mov high eor v11.16b, v11.16b, v21.16b // GHASH final-2 block - low movi v8.8b, #0 // suppress further partial tag feed in pmull v22.1q, v22.1d, v17.1d // GHASH final-2 block - mid eor v9.16b, v9.16b, v20.16b // GHASH final-2 block - high eor x6, x6, x13 // AES final-1 block - round N low eor v10.16b, v10.16b, v22.16b // GHASH final-2 block - mid eor x7, x7, x14 // AES final-1 block - round N high .Ldec_blocks_2_remaining: // blocks left = 2 stp x6, x7, [x2], #16 // AES final-1 block - store result rev64 v4.16b, v5.16b // GHASH final-1 block ld1 { v5.16b}, [x0], #16 // AES final block - load ciphertext eor v4.16b, v4.16b, v8.16b // feed in partial tag movi v8.8b, #0 // suppress further partial tag feed in mov d22, v4.d[1] // GHASH final-1 block - mid eor v0.16b, v5.16b, v3.16b // AES final block - result pmull2 v20.1q, v4.2d, v13.2d // GHASH final-1 block - high eor v22.8b, v22.8b, v4.8b // GHASH final-1 block - mid pmull v21.1q, v4.1d, v13.1d // GHASH final-1 block - low mov x6, v0.d[0] // AES final block - mov low ins v22.d[1], v22.d[0] // GHASH final-1 block - mid mov x7, v0.d[1] // AES final block - mov high pmull2 v22.1q, v22.2d, v16.2d // GHASH final-1 block - mid eor x6, x6, x13 // AES final block - round N low eor 
v11.16b, v11.16b, v21.16b // GHASH final-1 block - low eor v9.16b, v9.16b, v20.16b // GHASH final-1 block - high eor v10.16b, v10.16b, v22.16b // GHASH final-1 block - mid eor x7, x7, x14 // AES final block - round N high .Ldec_blocks_1_remaining: // blocks_left = 1 rev w9, w12 rev64 v4.16b, v5.16b // GHASH final block eor v4.16b, v4.16b, v8.16b // feed in partial tag pmull v21.1q, v4.1d, v12.1d // GHASH final block - low mov d8, v4.d[1] // GHASH final block - mid eor v8.8b, v8.8b, v4.8b // GHASH final block - mid pmull2 v20.1q, v4.2d, v12.2d // GHASH final block - high pmull v8.1q, v8.1d, v16.1d // GHASH final block - mid eor v9.16b, v9.16b, v20.16b // GHASH final block - high eor v11.16b, v11.16b, v21.16b // GHASH final block - low eor v10.16b, v10.16b, v8.16b // GHASH final block - mid movi v8.8b, #0xc2 eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up shl d8, d8, #56 // mod_constant eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment eor v11.16b, v11.16b, v8.16b // MODULO - fold into low stp x6, x7, [x2] str w9, [x16, #12] // store the updated counter eor v11.16b, v11.16b, v10.16b // MODULO - fold into low ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b mov x0, x15 st1 { v11.16b }, [x3] ldp x19, x20, [sp, #16] ldp x21, x22, [sp, #32] ldp x23, x24, [sp, #48] ldp d8, d9, [sp, #64] ldp d10, d11, [sp, #80] ldp d12, d13, [sp, #96] ldp d14, d15, [sp, #112] ldp x29, x30, [sp], #128 AARCH64_VALIDATE_LINK_REGISTER ret .size aes_gcm_dec_kernel,.-aes_gcm_dec_kernel #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
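Both kernels above manage the CTR state the same way: the 16-byte counter block is loaded as two 64-bit halves (x10 holds bytes 0-7; x11 initially holds bytes 8-15, of which the top word is the 32-bit counter), only those final four bytes are incremented as a big-endian integer (the rev/add/rev/orr sequences tagged "CTR block n"), and the updated counter word is stored back at offset 12 (str w9, [x16, #12]) before returning. Below is a minimal C sketch of just that counter handling, assuming the standard GCM block layout; next_ctr_block and bswap32 are illustrative helpers, not AWS-LC functions.

/* Hypothetical sketch (not part of AWS-LC): the 32-bit big-endian counter
 * increment performed per AES block by the kernels above. */
#include <stdint.h>
#include <string.h>

static uint32_t bswap32(uint32_t v) {
    return (v >> 24) | ((v >> 8) & 0x0000ff00u) |
           ((v << 8) & 0x00ff0000u) | (v << 24);
}

/* ctr[16] = 12-byte IV prefix || 4-byte big-endian block counter.
 * Emits the current block, then bumps only the trailing counter word,
 * mirroring the rev_ctr32 handling in the assembly. */
static void next_ctr_block(uint8_t ctr[16], uint8_t out[16]) {
    uint32_t c;
    memcpy(out, ctr, 16);                 /* current counter block        */
    memcpy(&c, ctr + 12, 4);              /* trailing word, big-endian    */
    c = bswap32(bswap32(c) + 1);          /* increment in native order    */
    memcpy(ctr + 12, &c, 4);              /* store back, still big-endian */
}

The 12-byte IV prefix never changes within a call, which is why only the counter word (w9) is written back to the x16 state at the end of both the encrypt and decrypt paths.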
marvin-hansen/iggy-streaming-system
22,385
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/keccak1600-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text .align 8 // strategic alignment and padding that allows to use // address value as loop termination condition... .quad 0,0,0,0,0,0,0,0 .type iotas,%object iotas: .quad 0x0000000000000001 .quad 0x0000000000008082 .quad 0x800000000000808a .quad 0x8000000080008000 .quad 0x000000000000808b .quad 0x0000000080000001 .quad 0x8000000080008081 .quad 0x8000000000008009 .quad 0x000000000000008a .quad 0x0000000000000088 .quad 0x0000000080008009 .quad 0x000000008000000a .quad 0x000000008000808b .quad 0x800000000000008b .quad 0x8000000000008089 .quad 0x8000000000008003 .quad 0x8000000000008002 .quad 0x8000000000000080 .quad 0x000000000000800a .quad 0x800000008000000a .quad 0x8000000080008081 .quad 0x8000000000008080 .quad 0x0000000080000001 .quad 0x8000000080008008 .size iotas,.-iotas .type KeccakF1600_int,%function .align 5 KeccakF1600_int: AARCH64_SIGN_LINK_REGISTER adr x28,iotas stp x28,x30,[sp,#16] // 32 bytes on top are mine b .Loop .align 4 .Loop: ////////////////////////////////////////// Theta eor x26,x0,x5 stp x4,x9,[sp,#0] // offload pair... eor x27,x1,x6 eor x28,x2,x7 eor x30,x3,x8 eor x4,x4,x9 eor x26,x26,x10 eor x27,x27,x11 eor x28,x28,x12 eor x30,x30,x13 eor x4,x4,x14 eor x26,x26,x15 eor x27,x27,x16 eor x28,x28,x17 eor x30,x30,x25 eor x4,x4,x19 eor x26,x26,x20 eor x28,x28,x22 eor x27,x27,x21 eor x30,x30,x23 eor x4,x4,x24 eor x9,x26,x28,ror#63 eor x1,x1,x9 eor x6,x6,x9 eor x11,x11,x9 eor x16,x16,x9 eor x21,x21,x9 eor x9,x27,x30,ror#63 eor x28,x28,x4,ror#63 eor x30,x30,x26,ror#63 eor x4,x4,x27,ror#63 eor x27, x2,x9 // mov x27,x2 eor x7,x7,x9 eor x12,x12,x9 eor x17,x17,x9 eor x22,x22,x9 eor x0,x0,x4 eor x5,x5,x4 eor x10,x10,x4 eor x15,x15,x4 eor x20,x20,x4 ldp x4,x9,[sp,#0] // re-load offloaded data eor x26, x3,x28 // mov x26,x3 eor x8,x8,x28 eor x13,x13,x28 eor x25,x25,x28 eor x23,x23,x28 eor x28, x4,x30 // mov x28,x4 eor x9,x9,x30 eor x14,x14,x30 eor x19,x19,x30 eor x24,x24,x30 ////////////////////////////////////////// Rho+Pi mov x30,x1 ror x1,x6,#20 //mov x27,x2 ror x2,x12,#21 //mov x26,x3 ror x3,x25,#43 //mov x28,x4 ror x4,x24,#50 ror x6,x9,#44 ror x12,x13,#39 ror x25,x17,#49 ror x24,x21,#62 ror x9,x22,#3 ror x13,x19,#56 ror x17,x11,#54 ror x21,x8,#9 ror x22,x14,#25 ror x19,x23,#8 ror x11,x7,#58 ror x8,x16,#19 ror x14,x20,#46 ror x23,x15,#23 ror x7,x10,#61 ror x16,x5,#28 ror x5,x26,#36 ror x10,x30,#63 ror x15,x28,#37 ror x20,x27,#2 ////////////////////////////////////////// Chi+Iota bic x26,x2,x1 bic x27,x3,x2 bic x28,x0,x4 bic x30,x1,x0 eor x0,x0,x26 bic x26,x4,x3 eor x1,x1,x27 ldr x27,[sp,#16] eor x3,x3,x28 eor x4,x4,x30 eor x2,x2,x26 ldr x30,[x27],#8 // Iota[i++] bic x26,x7,x6 tst x27,#255 // are we done? 
str x27,[sp,#16] bic x27,x8,x7 bic x28,x5,x9 eor x0,x0,x30 // A[0][0] ^= Iota bic x30,x6,x5 eor x5,x5,x26 bic x26,x9,x8 eor x6,x6,x27 eor x8,x8,x28 eor x9,x9,x30 eor x7,x7,x26 bic x26,x12,x11 bic x27,x13,x12 bic x28,x10,x14 bic x30,x11,x10 eor x10,x10,x26 bic x26,x14,x13 eor x11,x11,x27 eor x13,x13,x28 eor x14,x14,x30 eor x12,x12,x26 bic x26,x17,x16 bic x27,x25,x17 bic x28,x15,x19 bic x30,x16,x15 eor x15,x15,x26 bic x26,x19,x25 eor x16,x16,x27 eor x25,x25,x28 eor x19,x19,x30 eor x17,x17,x26 bic x26,x22,x21 bic x27,x23,x22 bic x28,x20,x24 bic x30,x21,x20 eor x20,x20,x26 bic x26,x24,x23 eor x21,x21,x27 eor x23,x23,x28 eor x24,x24,x30 eor x22,x22,x26 bne .Loop ldr x30,[sp,#24] AARCH64_VALIDATE_LINK_REGISTER ret .size KeccakF1600_int,.-KeccakF1600_int .type KeccakF1600,%function .align 5 KeccakF1600: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#48 str x0,[sp,#32] // offload argument mov x26,x0 ldp x0,x1,[x0,#16*0] ldp x2,x3,[x26,#16*1] ldp x4,x5,[x26,#16*2] ldp x6,x7,[x26,#16*3] ldp x8,x9,[x26,#16*4] ldp x10,x11,[x26,#16*5] ldp x12,x13,[x26,#16*6] ldp x14,x15,[x26,#16*7] ldp x16,x17,[x26,#16*8] ldp x25,x19,[x26,#16*9] ldp x20,x21,[x26,#16*10] ldp x22,x23,[x26,#16*11] ldr x24,[x26,#16*12] bl KeccakF1600_int ldr x26,[sp,#32] stp x0,x1,[x26,#16*0] stp x2,x3,[x26,#16*1] stp x4,x5,[x26,#16*2] stp x6,x7,[x26,#16*3] stp x8,x9,[x26,#16*4] stp x10,x11,[x26,#16*5] stp x12,x13,[x26,#16*6] stp x14,x15,[x26,#16*7] stp x16,x17,[x26,#16*8] stp x25,x19,[x26,#16*9] stp x20,x21,[x26,#16*10] stp x22,x23,[x26,#16*11] str x24,[x26,#16*12] ldp x19,x20,[x29,#16] add sp,sp,#48 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .size KeccakF1600,.-KeccakF1600 .globl SHA3_Absorb_hw .hidden SHA3_Absorb_hw .type SHA3_Absorb_hw,%function .align 5 SHA3_Absorb_hw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#64 stp x0,x1,[sp,#32] // offload arguments stp x2,x3,[sp,#48] mov x26,x0 // uint64_t A[5][5] mov x27,x1 // const void *inp mov x28,x2 // size_t len mov x30,x3 // size_t bsz ldp x0,x1,[x26,#16*0] ldp x2,x3,[x26,#16*1] ldp x4,x5,[x26,#16*2] ldp x6,x7,[x26,#16*3] ldp x8,x9,[x26,#16*4] ldp x10,x11,[x26,#16*5] ldp x12,x13,[x26,#16*6] ldp x14,x15,[x26,#16*7] ldp x16,x17,[x26,#16*8] ldp x25,x19,[x26,#16*9] ldp x20,x21,[x26,#16*10] ldp x22,x23,[x26,#16*11] ldr x24,[x26,#16*12] b .Loop_absorb .align 4 .Loop_absorb: subs x26,x28,x30 // len - bsz blo .Labsorbed str x26,[sp,#48] // save len - bsz ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x0,x0,x26 cmp x30,#8*(0+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x1,x1,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x2,x2,x26 cmp x30,#8*(2+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x3,x3,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x4,x4,x26 cmp x30,#8*(4+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x5,x5,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x6,x6,x26 cmp x30,#8*(6+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x7,x7,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x8,x8,x26 cmp x30,#8*(8+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x9,x9,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x10,x10,x26 cmp x30,#8*(10+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x11,x11,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x12,x12,x26 cmp x30,#8*(12+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x13,x13,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x14,x14,x26 cmp x30,#8*(14+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x15,x15,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x16,x16,x26 cmp x30,#8*(16+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x17,x17,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x25,x25,x26 cmp x30,#8*(18+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x19,x19,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x20,x20,x26 cmp x30,#8*(20+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x21,x21,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x22,x22,x26 cmp x30,#8*(22+2) blo .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x23,x23,x26 beq .Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x24,x24,x26 .Lprocess_block: str x27,[sp,#40] // save inp bl KeccakF1600_int ldr x27,[sp,#40] // 
restore arguments ldp x28,x30,[sp,#48] b .Loop_absorb .align 4 .Labsorbed: ldr x27,[sp,#32] stp x0,x1,[x27,#16*0] stp x2,x3,[x27,#16*1] stp x4,x5,[x27,#16*2] stp x6,x7,[x27,#16*3] stp x8,x9,[x27,#16*4] stp x10,x11,[x27,#16*5] stp x12,x13,[x27,#16*6] stp x14,x15,[x27,#16*7] stp x16,x17,[x27,#16*8] stp x25,x19,[x27,#16*9] stp x20,x21,[x27,#16*10] stp x22,x23,[x27,#16*11] str x24,[x27,#16*12] mov x0,x28 // return value ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .size SHA3_Absorb_hw,.-SHA3_Absorb_hw .globl SHA3_Squeeze_hw .hidden SHA3_Squeeze_hw .type SHA3_Squeeze_hw,%function .align 5 SHA3_Squeeze_hw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-48]! add x29,sp,#0 cmp x2,#0 beq .Lsqueeze_abort stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] mov x19,x0 // put aside arguments mov x20,x1 mov x21,x2 mov x22,x3 cmp x4, #0 // x4 = 'padded' argument; if !=0, perform Keccak first bne .Lnext_block .Loop_squeeze: ldr x4,[x0],#8 cmp x21,#8 blo .Lsqueeze_tail #ifdef __AARCH64EB__ rev x4,x4 #endif str x4,[x20],#8 subs x21,x21,#8 beq .Lsqueeze_done subs x3,x3,#8 bhi .Loop_squeeze .Lnext_block: mov x0,x19 bl KeccakF1600 mov x0,x19 mov x3,x22 b .Loop_squeeze .align 4 .Lsqueeze_tail: strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq .Lsqueeze_done strb w4,[x20],#1 .Lsqueeze_done: ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] .Lsqueeze_abort: ldp x29,x30,[sp],#48 AARCH64_VALIDATE_LINK_REGISTER ret .size SHA3_Squeeze_hw,.-SHA3_Squeeze_hw .type KeccakF1600_ce,%function .align 5 KeccakF1600_ce: mov x9,#24 adr x10,iotas b .Loop_ce .align 4 .Loop_ce: ////////////////////////////////////////////////// Theta .inst 0xce0f2a99 //eor3 v25.16b,v20.16b,v15.16b,v10.16b .inst 0xce102eba //eor3 v26.16b,v21.16b,v16.16b,v11.16b .inst 0xce1132db //eor3 v27.16b,v22.16b,v17.16b,v12.16b .inst 0xce1236fc //eor3 v28.16b,v23.16b,v18.16b,v13.16b .inst 0xce133b1d //eor3 v29.16b,v24.16b,v19.16b,v14.16b .inst 0xce050339 //eor3 v25.16b,v25.16b, v5.16b,v0.16b .inst 0xce06075a //eor3 v26.16b,v26.16b, v6.16b,v1.16b .inst 0xce070b7b //eor3 v27.16b,v27.16b, v7.16b,v2.16b .inst 0xce080f9c //eor3 v28.16b,v28.16b, v8.16b,v3.16b .inst 0xce0913bd //eor3 v29.16b,v29.16b, v9.16b,v4.16b .inst 0xce7b8f3e //rax1 v30.16b,v25.16b,v27.16b // D[1] .inst 0xce7c8f5f //rax1 v31.16b,v26.16b,v28.16b // D[2] .inst 0xce7d8f7b //rax1 v27.16b,v27.16b,v29.16b // D[3] .inst 0xce798f9c //rax1 v28.16b,v28.16b,v25.16b // D[4] .inst 0xce7a8fbd //rax1 v29.16b,v29.16b,v26.16b // D[0] ////////////////////////////////////////////////// Theta+Rho+Pi .inst 0xce9efc39 //xar v25.16b, v1.16b,v30.16b,#63 // C[0]=A[2][0] .inst 0xce9e50c1 //xar v1.16b,v6.16b,v30.16b,#20 .inst 0xce9cb126 //xar v6.16b,v9.16b,v28.16b,#44 .inst 0xce9f0ec9 //xar v9.16b,v22.16b,v31.16b,#3 .inst 0xce9c65d6 //xar v22.16b,v14.16b,v28.16b,#25 .inst 0xce9dba8e //xar v14.16b,v20.16b,v29.16b,#46 .inst 0xce9f085a //xar v26.16b, v2.16b,v31.16b,#2 // C[1]=A[4][0] .inst 0xce9f5582 //xar v2.16b,v12.16b,v31.16b,#21 .inst 0xce9b9dac //xar v12.16b,v13.16b,v27.16b,#39 .inst 0xce9ce26d //xar v13.16b,v19.16b,v28.16b,#56 .inst 0xce9b22f3 //xar v19.16b,v23.16b,v27.16b,#8 .inst 
0xce9d5df7 //xar v23.16b,v15.16b,v29.16b,#23 .inst 0xce9c948f //xar v15.16b,v4.16b,v28.16b,#37 .inst 0xce9ccb1c //xar v28.16b, v24.16b,v28.16b,#50 // D[4]=A[0][4] .inst 0xce9efab8 //xar v24.16b,v21.16b,v30.16b,#62 .inst 0xce9b2508 //xar v8.16b,v8.16b,v27.16b,#9 // A[1][3]=A[4][1] .inst 0xce9e4e04 //xar v4.16b,v16.16b,v30.16b,#19 // A[0][4]=A[1][3] .inst 0xce9d70b0 //xar v16.16b,v5.16b,v29.16b,#28 .inst 0xce9b9065 //xar v5.16b,v3.16b,v27.16b,#36 eor v0.16b,v0.16b,v29.16b .inst 0xce9bae5b //xar v27.16b, v18.16b,v27.16b,#43 // D[3]=A[0][3] .inst 0xce9fc623 //xar v3.16b,v17.16b,v31.16b,#49 // A[0][3]=A[3][3] .inst 0xce9ed97e //xar v30.16b, v11.16b,v30.16b,#54 // D[1]=A[3][2] .inst 0xce9fe8ff //xar v31.16b, v7.16b,v31.16b,#58 // D[2]=A[2][1] .inst 0xce9df55d //xar v29.16b, v10.16b,v29.16b,#61 // D[0]=A[1][2] ////////////////////////////////////////////////// Chi+Iota .inst 0xce362354 //bcax v20.16b,v26.16b, v22.16b,v8.16b // A[1][3]=A[4][1] .inst 0xce375915 //bcax v21.16b,v8.16b,v23.16b,v22.16b // A[1][3]=A[4][1] .inst 0xce385ed6 //bcax v22.16b,v22.16b,v24.16b,v23.16b .inst 0xce3a62f7 //bcax v23.16b,v23.16b,v26.16b, v24.16b .inst 0xce286b18 //bcax v24.16b,v24.16b,v8.16b,v26.16b // A[1][3]=A[4][1] ld1r {v26.2d},[x10],#8 .inst 0xce330fd1 //bcax v17.16b,v30.16b, v19.16b,v3.16b // A[0][3]=A[3][3] .inst 0xce2f4c72 //bcax v18.16b,v3.16b,v15.16b,v19.16b // A[0][3]=A[3][3] .inst 0xce303e73 //bcax v19.16b,v19.16b,v16.16b,v15.16b .inst 0xce3e41ef //bcax v15.16b,v15.16b,v30.16b, v16.16b .inst 0xce237a10 //bcax v16.16b,v16.16b,v3.16b,v30.16b // A[0][3]=A[3][3] .inst 0xce2c7f2a //bcax v10.16b,v25.16b, v12.16b,v31.16b .inst 0xce2d33eb //bcax v11.16b,v31.16b, v13.16b,v12.16b .inst 0xce2e358c //bcax v12.16b,v12.16b,v14.16b,v13.16b .inst 0xce3939ad //bcax v13.16b,v13.16b,v25.16b, v14.16b .inst 0xce3f65ce //bcax v14.16b,v14.16b,v31.16b, v25.16b .inst 0xce2913a7 //bcax v7.16b,v29.16b, v9.16b,v4.16b // A[0][4]=A[1][3] .inst 0xce252488 //bcax v8.16b,v4.16b,v5.16b,v9.16b // A[0][4]=A[1][3] .inst 0xce261529 //bcax v9.16b,v9.16b,v6.16b,v5.16b .inst 0xce3d18a5 //bcax v5.16b,v5.16b,v29.16b, v6.16b .inst 0xce2474c6 //bcax v6.16b,v6.16b,v4.16b,v29.16b // A[0][4]=A[1][3] .inst 0xce207363 //bcax v3.16b,v27.16b, v0.16b,v28.16b .inst 0xce210384 //bcax v4.16b,v28.16b, v1.16b,v0.16b .inst 0xce220400 //bcax v0.16b,v0.16b,v2.16b,v1.16b .inst 0xce3b0821 //bcax v1.16b,v1.16b,v27.16b, v2.16b .inst 0xce3c6c42 //bcax v2.16b,v2.16b,v28.16b, v27.16b eor v0.16b,v0.16b,v26.16b subs x9,x9,#1 bne .Loop_ce ret .size KeccakF1600_ce,.-KeccakF1600_ce .type KeccakF1600_cext,%function .align 5 KeccakF1600_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! 
add x29,sp,#0 stp d8,d9,[sp,#16] // per ABI requirement stp d10,d11,[sp,#32] stp d12,d13,[sp,#48] stp d14,d15,[sp,#64] ldp d0,d1,[x0,#8*0] ldp d2,d3,[x0,#8*2] ldp d4,d5,[x0,#8*4] ldp d6,d7,[x0,#8*6] ldp d8,d9,[x0,#8*8] ldp d10,d11,[x0,#8*10] ldp d12,d13,[x0,#8*12] ldp d14,d15,[x0,#8*14] ldp d16,d17,[x0,#8*16] ldp d18,d19,[x0,#8*18] ldp d20,d21,[x0,#8*20] ldp d22,d23,[x0,#8*22] ldr d24,[x0,#8*24] bl KeccakF1600_ce ldr x30,[sp,#8] stp d0,d1,[x0,#8*0] stp d2,d3,[x0,#8*2] stp d4,d5,[x0,#8*4] stp d6,d7,[x0,#8*6] stp d8,d9,[x0,#8*8] stp d10,d11,[x0,#8*10] stp d12,d13,[x0,#8*12] stp d14,d15,[x0,#8*14] stp d16,d17,[x0,#8*16] stp d18,d19,[x0,#8*18] stp d20,d21,[x0,#8*20] stp d22,d23,[x0,#8*22] str d24,[x0,#8*24] ldp d8,d9,[sp,#16] ldp d10,d11,[sp,#32] ldp d12,d13,[sp,#48] ldp d14,d15,[sp,#64] ldr x29,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret .size KeccakF1600_cext,.-KeccakF1600_cext .globl SHA3_Absorb_cext .hidden SHA3_Absorb_cext .type SHA3_Absorb_cext,%function .align 5 SHA3_Absorb_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! add x29,sp,#0 stp d8,d9,[sp,#16] // per ABI requirement stp d10,d11,[sp,#32] stp d12,d13,[sp,#48] stp d14,d15,[sp,#64] ldp d0,d1,[x0,#8*0] ldp d2,d3,[x0,#8*2] ldp d4,d5,[x0,#8*4] ldp d6,d7,[x0,#8*6] ldp d8,d9,[x0,#8*8] ldp d10,d11,[x0,#8*10] ldp d12,d13,[x0,#8*12] ldp d14,d15,[x0,#8*14] ldp d16,d17,[x0,#8*16] ldp d18,d19,[x0,#8*18] ldp d20,d21,[x0,#8*20] ldp d22,d23,[x0,#8*22] ldr d24,[x0,#8*24] b .Loop_absorb_ce .align 4 .Loop_absorb_ce: subs x2,x2,x3 // len - bsz blo .Labsorbed_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v0.16b,v0.16b,v31.16b cmp x3,#8*(0+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v1.16b,v1.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v2.16b,v2.16b,v31.16b cmp x3,#8*(2+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v3.16b,v3.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v4.16b,v4.16b,v31.16b cmp x3,#8*(4+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v5.16b,v5.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v6.16b,v6.16b,v31.16b cmp x3,#8*(6+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v7.16b,v7.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v8.16b,v8.16b,v31.16b cmp x3,#8*(8+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v9.16b,v9.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v10.16b,v10.16b,v31.16b cmp x3,#8*(10+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v11.16b,v11.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v12.16b,v12.16b,v31.16b cmp x3,#8*(12+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v13.16b,v13.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v14.16b,v14.16b,v31.16b cmp x3,#8*(14+2) blo .Lprocess_block_ce 
ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v15.16b,v15.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v16.16b,v16.16b,v31.16b cmp x3,#8*(16+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v17.16b,v17.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v18.16b,v18.16b,v31.16b cmp x3,#8*(18+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v19.16b,v19.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v20.16b,v20.16b,v31.16b cmp x3,#8*(20+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v21.16b,v21.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v22.16b,v22.16b,v31.16b cmp x3,#8*(22+2) blo .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v23.16b,v23.16b,v31.16b beq .Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v24.16b,v24.16b,v31.16b .Lprocess_block_ce: bl KeccakF1600_ce b .Loop_absorb_ce .align 4 .Labsorbed_ce: stp d0,d1,[x0,#8*0] stp d2,d3,[x0,#8*2] stp d4,d5,[x0,#8*4] stp d6,d7,[x0,#8*6] stp d8,d9,[x0,#8*8] stp d10,d11,[x0,#8*10] stp d12,d13,[x0,#8*12] stp d14,d15,[x0,#8*14] stp d16,d17,[x0,#8*16] stp d18,d19,[x0,#8*18] stp d20,d21,[x0,#8*20] stp d22,d23,[x0,#8*22] str d24,[x0,#8*24] add x0,x2,x3 // return value ldp d8,d9,[sp,#16] ldp d10,d11,[sp,#32] ldp d12,d13,[sp,#48] ldp d14,d15,[sp,#64] ldp x29,x30,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret .size SHA3_Absorb_cext,.-SHA3_Absorb_cext .globl SHA3_Squeeze_cext .hidden SHA3_Squeeze_cext .type SHA3_Squeeze_cext,%function .align 5 SHA3_Squeeze_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 cmp x2,#0 beq .Lsqueeze_done_ce mov x9,x0 mov x10,x3 .Loop_squeeze_ce: ldr x4,[x9],#8 cmp x2,#8 blo .Lsqueeze_tail_ce #ifdef __AARCH64EB__ rev x4,x4 #endif str x4,[x1],#8 beq .Lsqueeze_done_ce sub x2,x2,#8 subs x10,x10,#8 bhi .Loop_squeeze_ce bl KeccakF1600_cext ldr x30,[sp,#8] mov x9,x0 mov x10,x3 b .Loop_squeeze_ce .align 4 .Lsqueeze_tail_ce: strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq .Lsqueeze_done_ce strb w4,[x1],#1 .Lsqueeze_done_ce: ldr x29,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .size SHA3_Squeeze_cext,.-SHA3_Squeeze_cext .byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
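SHA3_Absorb_hw and SHA3_Absorb_cext above both follow the same sponge absorb loop: XOR one rate-sized block of input into the leading lanes of the 5x5 state, run the Keccak-f[1600] permutation, and stop once fewer than bsz bytes remain, returning that leftover count to the caller. A minimal C sketch of that loop, assuming a little-endian host (the #ifdef __AARCH64EB__ rev paths handle big-endian) and with keccak_f1600 and sha3_absorb_sketch as illustrative names rather than the AWS-LC C API:

```c
/*
 * Sketch of the absorb loop implemented by SHA3_Absorb_hw above.
 * keccak_f1600() stands in for the KeccakF1600 permutation defined in
 * this file.
 */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

void keccak_f1600(uint64_t A[25]);      /* permutation, provided elsewhere */

size_t sha3_absorb_sketch(uint64_t A[25], const uint8_t *inp, size_t len,
                          size_t bsz) {
    while (len >= bsz) {                /* full rate-sized blocks only */
        for (size_t i = 0; i < bsz / 8; i++) {
            uint64_t lane;
            memcpy(&lane, inp + 8 * i, sizeof(lane));
            A[i] ^= lane;               /* XOR the block into the state */
        }
        keccak_f1600(A);                /* permute after every block */
        inp += bsz;
        len -= bsz;
    }
    return len;                         /* leftover bytes for the caller */
}
```

SHA3_Squeeze_hw is the mirror image: it copies lanes out of the state and re-runs the permutation each time a full rate block has been emitted.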
marvin-hansen/iggy-streaming-system
2,101
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/bn-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text // BN_ULONG bn_add_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, // size_t num); .type bn_add_words, %function .globl bn_add_words .hidden bn_add_words .align 4 bn_add_words: AARCH64_VALID_CALL_TARGET # Clear the carry flag. cmn xzr, xzr # aarch64 can load two registers at a time, so we do two loop iterations at # at a time. Split x3 = 2 * x8 + x3. This allows loop # operations to use CBNZ without clobbering the carry flag. lsr x8, x3, #1 and x3, x3, #1 cbz x8, .Ladd_tail .Ladd_loop: ldp x4, x5, [x1], #16 ldp x6, x7, [x2], #16 sub x8, x8, #1 adcs x4, x4, x6 adcs x5, x5, x7 stp x4, x5, [x0], #16 cbnz x8, .Ladd_loop .Ladd_tail: cbz x3, .Ladd_exit ldr x4, [x1], #8 ldr x6, [x2], #8 adcs x4, x4, x6 str x4, [x0], #8 .Ladd_exit: cset x0, cs ret .size bn_add_words,.-bn_add_words // BN_ULONG bn_sub_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, // size_t num); .type bn_sub_words, %function .globl bn_sub_words .hidden bn_sub_words .align 4 bn_sub_words: AARCH64_VALID_CALL_TARGET # Set the carry flag. Arm's borrow bit is flipped from the carry flag, # so we want C = 1 here. cmp xzr, xzr # aarch64 can load two registers at a time, so we do two loop iterations at # at a time. Split x3 = 2 * x8 + x3. This allows loop # operations to use CBNZ without clobbering the carry flag. lsr x8, x3, #1 and x3, x3, #1 cbz x8, .Lsub_tail .Lsub_loop: ldp x4, x5, [x1], #16 ldp x6, x7, [x2], #16 sub x8, x8, #1 sbcs x4, x4, x6 sbcs x5, x5, x7 stp x4, x5, [x0], #16 cbnz x8, .Lsub_loop .Lsub_tail: cbz x3, .Lsub_exit ldr x4, [x1], #8 ldr x6, [x2], #8 sbcs x4, x4, x6 str x4, [x0], #8 .Lsub_exit: cset x0, cc ret .size bn_sub_words,.-bn_sub_words #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
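The prototypes in the comments give the contract of these two routines: limb-wise add or subtract across num limbs, returning the final carry or borrow. The assembly keeps that carry in the flags register for the whole loop (hence the CMN/CMP setup and the CBNZ loop control, which does not touch the flags) and processes two limbs per iteration with LDP/ADCS. A one-limb-per-iteration C reference of what bn_add_words computes is sketched below; the _ref suffix marks it as an illustrative reimplementation, not AWS-LC code.

```c
/*
 * Plain C reference for bn_add_words: rp = ap + bp over num limbs,
 * returning the carry out of the top limb.
 */
#include <stddef.h>
#include <stdint.h>

typedef uint64_t BN_ULONG;              /* 64-bit limbs on aarch64 */

BN_ULONG bn_add_words_ref(BN_ULONG *rp, const BN_ULONG *ap,
                          const BN_ULONG *bp, size_t num) {
    BN_ULONG carry = 0;
    for (size_t i = 0; i < num; i++) {
        BN_ULONG t = ap[i] + carry;
        carry = (t < carry);            /* carried out of ap[i] + carry */
        rp[i] = t + bp[i];
        carry += (rp[i] < bp[i]);       /* carried out of the second add */
    }
    return carry;                       /* 1 if the sum needed num+1 limbs */
}
```

bn_sub_words has the same shape with subtraction and a borrow, returning 1 when the difference went negative.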
marvin-hansen/iggy-streaming-system
31,208
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/fipsmodule/armv8-mont.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .text .globl bn_mul_mont .hidden bn_mul_mont .type bn_mul_mont,%function .align 5 bn_mul_mont: AARCH64_SIGN_LINK_REGISTER tst x5,#7 b.eq __bn_sqr8x_mont tst x5,#3 b.eq __bn_mul4x_mont .Lmul_mont: stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] ldr x9,[x2],#8 // bp[0] sub x22,sp,x5,lsl#3 ldp x7,x8,[x1],#16 // ap[0..1] lsl x5,x5,#3 ldr x4,[x4] // *n0 and x22,x22,#-16 // ABI says so ldp x13,x14,[x3],#16 // np[0..1] mul x6,x7,x9 // ap[0]*bp[0] sub x21,x5,#16 // j=num-2 umulh x7,x7,x9 mul x10,x8,x9 // ap[1]*bp[0] umulh x11,x8,x9 mul x15,x6,x4 // "tp[0]"*n0 mov sp,x22 // alloca // (*) mul x12,x13,x15 // np[0]*m1 umulh x13,x13,x15 mul x16,x14,x15 // np[1]*m1 // (*) adds x12,x12,x6 // discarded // (*) As for removal of first multiplication and addition // instructions. The outcome of first addition is // guaranteed to be zero, which leaves two computationally // significant outcomes: it either carries or not. Then // question is when does it carry? Is there alternative // way to deduce it? If you follow operations, you can // observe that condition for carry is quite simple: // x6 being non-zero. So that carry can be calculated // by adding -1 to x6. That's what next instruction does. subs xzr,x6,#1 // (*) umulh x17,x14,x15 adc x13,x13,xzr cbz x21,.L1st_skip .L1st: ldr x8,[x1],#8 adds x6,x10,x7 sub x21,x21,#8 // j-- adc x7,x11,xzr ldr x14,[x3],#8 adds x12,x16,x13 mul x10,x8,x9 // ap[j]*bp[0] adc x13,x17,xzr umulh x11,x8,x9 adds x12,x12,x6 mul x16,x14,x15 // np[j]*m1 adc x13,x13,xzr umulh x17,x14,x15 str x12,[x22],#8 // tp[j-1] cbnz x21,.L1st .L1st_skip: adds x6,x10,x7 sub x1,x1,x5 // rewind x1 adc x7,x11,xzr adds x12,x16,x13 sub x3,x3,x5 // rewind x3 adc x13,x17,xzr adds x12,x12,x6 sub x20,x5,#8 // i=num-1 adcs x13,x13,x7 adc x19,xzr,xzr // upmost overflow bit stp x12,x13,[x22] .Louter: ldr x9,[x2],#8 // bp[i] ldp x7,x8,[x1],#16 ldr x23,[sp] // tp[0] add x22,sp,#8 mul x6,x7,x9 // ap[0]*bp[i] sub x21,x5,#16 // j=num-2 umulh x7,x7,x9 ldp x13,x14,[x3],#16 mul x10,x8,x9 // ap[1]*bp[i] adds x6,x6,x23 umulh x11,x8,x9 adc x7,x7,xzr mul x15,x6,x4 sub x20,x20,#8 // i-- // (*) mul x12,x13,x15 // np[0]*m1 umulh x13,x13,x15 mul x16,x14,x15 // np[1]*m1 // (*) adds x12,x12,x6 subs xzr,x6,#1 // (*) umulh x17,x14,x15 cbz x21,.Linner_skip .Linner: ldr x8,[x1],#8 adc x13,x13,xzr ldr x23,[x22],#8 // tp[j] adds x6,x10,x7 sub x21,x21,#8 // j-- adc x7,x11,xzr adds x12,x16,x13 ldr x14,[x3],#8 adc x13,x17,xzr mul x10,x8,x9 // ap[j]*bp[i] adds x6,x6,x23 umulh x11,x8,x9 adc x7,x7,xzr mul x16,x14,x15 // np[j]*m1 adds x12,x12,x6 umulh x17,x14,x15 str x12,[x22,#-16] // tp[j-1] cbnz x21,.Linner .Linner_skip: ldr x23,[x22],#8 // tp[j] adc x13,x13,xzr adds x6,x10,x7 sub x1,x1,x5 // rewind x1 adc x7,x11,xzr adds x12,x16,x13 sub x3,x3,x5 // rewind x3 adcs x13,x17,x19 adc x19,xzr,xzr adds x6,x6,x23 adc x7,x7,xzr adds x12,x12,x6 adcs x13,x13,x7 adc x19,x19,xzr // upmost overflow bit stp x12,x13,[x22,#-16] cbnz x20,.Louter // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. 
ldr x23,[sp] // tp[0] add x22,sp,#8 ldr x14,[x3],#8 // np[0] subs x21,x5,#8 // j=num-1 and clear borrow mov x1,x0 .Lsub: sbcs x8,x23,x14 // tp[j]-np[j] ldr x23,[x22],#8 sub x21,x21,#8 // j-- ldr x14,[x3],#8 str x8,[x1],#8 // rp[j]=tp[j]-np[j] cbnz x21,.Lsub sbcs x8,x23,x14 sbcs x19,x19,xzr // did it borrow? str x8,[x1],#8 // rp[num-1] ldr x23,[sp] // tp[0] add x22,sp,#8 ldr x8,[x0],#8 // rp[0] sub x5,x5,#8 // num-- nop .Lcond_copy: sub x5,x5,#8 // num-- csel x14,x23,x8,lo // did it borrow? ldr x23,[x22],#8 ldr x8,[x0],#8 str xzr,[x22,#-16] // wipe tp str x14,[x0,#-16] cbnz x5,.Lcond_copy csel x14,x23,x8,lo str xzr,[x22,#-8] // wipe tp str x14,[x0,#-8] ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldr x29,[sp],#64 AARCH64_VALIDATE_LINK_REGISTER ret .size bn_mul_mont,.-bn_mul_mont .type __bn_sqr8x_mont,%function .align 5 __bn_sqr8x_mont: // Not adding AARCH64_SIGN_LINK_REGISTER here because __bn_sqr8x_mont is jumped to // only from bn_mul_mont which has already signed the return address. cmp x1,x2 b.ne __bn_mul4x_mont .Lsqr8x_mont: stp x29,x30,[sp,#-128]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] stp x0,x3,[sp,#96] // offload rp and np ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] ldp x10,x11,[x1,#8*4] ldp x12,x13,[x1,#8*6] sub x2,sp,x5,lsl#4 lsl x5,x5,#3 ldr x4,[x4] // *n0 mov sp,x2 // alloca sub x27,x5,#8*8 b .Lsqr8x_zero_start .Lsqr8x_zero: sub x27,x27,#8*8 stp xzr,xzr,[x2,#8*0] stp xzr,xzr,[x2,#8*2] stp xzr,xzr,[x2,#8*4] stp xzr,xzr,[x2,#8*6] .Lsqr8x_zero_start: stp xzr,xzr,[x2,#8*8] stp xzr,xzr,[x2,#8*10] stp xzr,xzr,[x2,#8*12] stp xzr,xzr,[x2,#8*14] add x2,x2,#8*16 cbnz x27,.Lsqr8x_zero add x3,x1,x5 add x1,x1,#8*8 mov x19,xzr mov x20,xzr mov x21,xzr mov x22,xzr mov x23,xzr mov x24,xzr mov x25,xzr mov x26,xzr mov x2,sp str x4,[x29,#112] // offload n0 // Multiply everything but a[i]*a[i] .align 4 .Lsqr8x_outer_loop: // a[1]a[0] (i) // a[2]a[0] // a[3]a[0] // a[4]a[0] // a[5]a[0] // a[6]a[0] // a[7]a[0] // a[2]a[1] (ii) // a[3]a[1] // a[4]a[1] // a[5]a[1] // a[6]a[1] // a[7]a[1] // a[3]a[2] (iii) // a[4]a[2] // a[5]a[2] // a[6]a[2] // a[7]a[2] // a[4]a[3] (iv) // a[5]a[3] // a[6]a[3] // a[7]a[3] // a[5]a[4] (v) // a[6]a[4] // a[7]a[4] // a[6]a[5] (vi) // a[7]a[5] // a[7]a[6] (vii) mul x14,x7,x6 // lo(a[1..7]*a[0]) (i) mul x15,x8,x6 mul x16,x9,x6 mul x17,x10,x6 adds x20,x20,x14 // t[1]+lo(a[1]*a[0]) mul x14,x11,x6 adcs x21,x21,x15 mul x15,x12,x6 adcs x22,x22,x16 mul x16,x13,x6 adcs x23,x23,x17 umulh x17,x7,x6 // hi(a[1..7]*a[0]) adcs x24,x24,x14 umulh x14,x8,x6 adcs x25,x25,x15 umulh x15,x9,x6 adcs x26,x26,x16 umulh x16,x10,x6 stp x19,x20,[x2],#8*2 // t[0..1] adc x19,xzr,xzr // t[8] adds x21,x21,x17 // t[2]+lo(a[1]*a[0]) umulh x17,x11,x6 adcs x22,x22,x14 umulh x14,x12,x6 adcs x23,x23,x15 umulh x15,x13,x6 adcs x24,x24,x16 mul x16,x8,x7 // lo(a[2..7]*a[1]) (ii) adcs x25,x25,x17 mul x17,x9,x7 adcs x26,x26,x14 mul x14,x10,x7 adc x19,x19,x15 mul x15,x11,x7 adds x22,x22,x16 mul x16,x12,x7 adcs x23,x23,x17 mul x17,x13,x7 adcs x24,x24,x14 umulh x14,x8,x7 // hi(a[2..7]*a[1]) adcs x25,x25,x15 umulh x15,x9,x7 adcs x26,x26,x16 umulh x16,x10,x7 adcs x19,x19,x17 umulh x17,x11,x7 stp x21,x22,[x2],#8*2 // t[2..3] adc x20,xzr,xzr // t[9] adds x23,x23,x14 umulh x14,x12,x7 adcs x24,x24,x15 umulh x15,x13,x7 adcs x25,x25,x16 mul x16,x9,x8 // lo(a[3..7]*a[2]) (iii) adcs x26,x26,x17 mul x17,x10,x8 adcs x19,x19,x14 mul x14,x11,x8 adc x20,x20,x15 mul x15,x12,x8 adds x24,x24,x16 mul x16,x13,x8 adcs 
x25,x25,x17 umulh x17,x9,x8 // hi(a[3..7]*a[2]) adcs x26,x26,x14 umulh x14,x10,x8 adcs x19,x19,x15 umulh x15,x11,x8 adcs x20,x20,x16 umulh x16,x12,x8 stp x23,x24,[x2],#8*2 // t[4..5] adc x21,xzr,xzr // t[10] adds x25,x25,x17 umulh x17,x13,x8 adcs x26,x26,x14 mul x14,x10,x9 // lo(a[4..7]*a[3]) (iv) adcs x19,x19,x15 mul x15,x11,x9 adcs x20,x20,x16 mul x16,x12,x9 adc x21,x21,x17 mul x17,x13,x9 adds x26,x26,x14 umulh x14,x10,x9 // hi(a[4..7]*a[3]) adcs x19,x19,x15 umulh x15,x11,x9 adcs x20,x20,x16 umulh x16,x12,x9 adcs x21,x21,x17 umulh x17,x13,x9 stp x25,x26,[x2],#8*2 // t[6..7] adc x22,xzr,xzr // t[11] adds x19,x19,x14 mul x14,x11,x10 // lo(a[5..7]*a[4]) (v) adcs x20,x20,x15 mul x15,x12,x10 adcs x21,x21,x16 mul x16,x13,x10 adc x22,x22,x17 umulh x17,x11,x10 // hi(a[5..7]*a[4]) adds x20,x20,x14 umulh x14,x12,x10 adcs x21,x21,x15 umulh x15,x13,x10 adcs x22,x22,x16 mul x16,x12,x11 // lo(a[6..7]*a[5]) (vi) adc x23,xzr,xzr // t[12] adds x21,x21,x17 mul x17,x13,x11 adcs x22,x22,x14 umulh x14,x12,x11 // hi(a[6..7]*a[5]) adc x23,x23,x15 umulh x15,x13,x11 adds x22,x22,x16 mul x16,x13,x12 // lo(a[7]*a[6]) (vii) adcs x23,x23,x17 umulh x17,x13,x12 // hi(a[7]*a[6]) adc x24,xzr,xzr // t[13] adds x23,x23,x14 sub x27,x3,x1 // done yet? adc x24,x24,x15 adds x24,x24,x16 sub x14,x3,x5 // rewinded ap adc x25,xzr,xzr // t[14] add x25,x25,x17 cbz x27,.Lsqr8x_outer_break mov x4,x6 ldp x6,x7,[x2,#8*0] ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] adds x19,x19,x6 adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x0,x1 adcs x26,xzr,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved below mov x27,#-8*8 // a[8]a[0] // a[9]a[0] // a[a]a[0] // a[b]a[0] // a[c]a[0] // a[d]a[0] // a[e]a[0] // a[f]a[0] // a[8]a[1] // a[f]a[1]........................ // a[8]a[2] // a[f]a[2]........................ // a[8]a[3] // a[f]a[3]........................ // a[8]a[4] // a[f]a[4]........................ // a[8]a[5] // a[f]a[5]........................ // a[8]a[6] // a[f]a[6]........................ // a[8]a[7] // a[f]a[7]........................ .Lsqr8x_mul: mul x14,x6,x4 adc x28,xzr,xzr // carry bit, modulo-scheduled mul x15,x7,x4 add x27,x27,#8 mul x16,x8,x4 mul x17,x9,x4 adds x19,x19,x14 mul x14,x10,x4 adcs x20,x20,x15 mul x15,x11,x4 adcs x21,x21,x16 mul x16,x12,x4 adcs x22,x22,x17 mul x17,x13,x4 adcs x23,x23,x14 umulh x14,x6,x4 adcs x24,x24,x15 umulh x15,x7,x4 adcs x25,x25,x16 umulh x16,x8,x4 adcs x26,x26,x17 umulh x17,x9,x4 adc x28,x28,xzr str x19,[x2],#8 adds x19,x20,x14 umulh x14,x10,x4 adcs x20,x21,x15 umulh x15,x11,x4 adcs x21,x22,x16 umulh x16,x12,x4 adcs x22,x23,x17 umulh x17,x13,x4 ldr x4,[x0,x27] adcs x23,x24,x14 adcs x24,x25,x15 adcs x25,x26,x16 adcs x26,x28,x17 //adc x28,xzr,xzr // moved above cbnz x27,.Lsqr8x_mul // note that carry flag is guaranteed // to be zero at this point cmp x1,x3 // done yet? b.eq .Lsqr8x_break ldp x6,x7,[x2,#8*0] ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] adds x19,x19,x6 ldr x4,[x0,#-8*8] adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x27,#-8*8 adcs x26,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved above b .Lsqr8x_mul .align 4 .Lsqr8x_break: ldp x6,x7,[x0,#8*0] add x1,x0,#8*8 ldp x8,x9,[x0,#8*2] sub x14,x3,x1 // is it last iteration? 
ldp x10,x11,[x0,#8*4] sub x15,x2,x14 ldp x12,x13,[x0,#8*6] cbz x14,.Lsqr8x_outer_loop stp x19,x20,[x2,#8*0] ldp x19,x20,[x15,#8*0] stp x21,x22,[x2,#8*2] ldp x21,x22,[x15,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[x15,#8*4] stp x25,x26,[x2,#8*6] mov x2,x15 ldp x25,x26,[x15,#8*6] b .Lsqr8x_outer_loop .align 4 .Lsqr8x_outer_break: // Now multiply above result by 2 and add a[n-1]*a[n-1]|...|a[0]*a[0] ldp x7,x9,[x14,#8*0] // recall that x14 is &a[0] ldp x15,x16,[sp,#8*1] ldp x11,x13,[x14,#8*2] add x1,x14,#8*4 ldp x17,x14,[sp,#8*3] stp x19,x20,[x2,#8*0] mul x19,x7,x7 stp x21,x22,[x2,#8*2] umulh x7,x7,x7 stp x23,x24,[x2,#8*4] mul x8,x9,x9 stp x25,x26,[x2,#8*6] mov x2,sp umulh x9,x9,x9 adds x20,x7,x15,lsl#1 extr x15,x16,x15,#63 sub x27,x5,#8*4 .Lsqr4x_shift_n_add: adcs x21,x8,x15 extr x16,x17,x16,#63 sub x27,x27,#8*4 adcs x22,x9,x16 ldp x15,x16,[x2,#8*5] mul x10,x11,x11 ldp x7,x9,[x1],#8*2 umulh x11,x11,x11 mul x12,x13,x13 umulh x13,x13,x13 extr x17,x14,x17,#63 stp x19,x20,[x2,#8*0] adcs x23,x10,x17 extr x14,x15,x14,#63 stp x21,x22,[x2,#8*2] adcs x24,x11,x14 ldp x17,x14,[x2,#8*7] extr x15,x16,x15,#63 adcs x25,x12,x15 extr x16,x17,x16,#63 adcs x26,x13,x16 ldp x15,x16,[x2,#8*9] mul x6,x7,x7 ldp x11,x13,[x1],#8*2 umulh x7,x7,x7 mul x8,x9,x9 umulh x9,x9,x9 stp x23,x24,[x2,#8*4] extr x17,x14,x17,#63 stp x25,x26,[x2,#8*6] add x2,x2,#8*8 adcs x19,x6,x17 extr x14,x15,x14,#63 adcs x20,x7,x14 ldp x17,x14,[x2,#8*3] extr x15,x16,x15,#63 cbnz x27,.Lsqr4x_shift_n_add ldp x1,x4,[x29,#104] // pull np and n0 adcs x21,x8,x15 extr x16,x17,x16,#63 adcs x22,x9,x16 ldp x15,x16,[x2,#8*5] mul x10,x11,x11 umulh x11,x11,x11 stp x19,x20,[x2,#8*0] mul x12,x13,x13 umulh x13,x13,x13 stp x21,x22,[x2,#8*2] extr x17,x14,x17,#63 adcs x23,x10,x17 extr x14,x15,x14,#63 ldp x19,x20,[sp,#8*0] adcs x24,x11,x14 extr x15,x16,x15,#63 ldp x6,x7,[x1,#8*0] adcs x25,x12,x15 extr x16,xzr,x16,#63 ldp x8,x9,[x1,#8*2] adc x26,x13,x16 ldp x10,x11,[x1,#8*4] // Reduce by 512 bits per iteration mul x28,x4,x19 // t[0]*n0 ldp x12,x13,[x1,#8*6] add x3,x1,x5 ldp x21,x22,[sp,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[sp,#8*4] stp x25,x26,[x2,#8*6] ldp x25,x26,[sp,#8*6] add x1,x1,#8*8 mov x30,xzr // initial top-most carry mov x2,sp mov x27,#8 .Lsqr8x_reduction: // (*) mul x14,x6,x28 // lo(n[0-7])*lo(t[0]*n0) mul x15,x7,x28 sub x27,x27,#1 mul x16,x8,x28 str x28,[x2],#8 // put aside t[0]*n0 for tail processing mul x17,x9,x28 // (*) adds xzr,x19,x14 subs xzr,x19,#1 // (*) mul x14,x10,x28 adcs x19,x20,x15 mul x15,x11,x28 adcs x20,x21,x16 mul x16,x12,x28 adcs x21,x22,x17 mul x17,x13,x28 adcs x22,x23,x14 umulh x14,x6,x28 // hi(n[0-7])*lo(t[0]*n0) adcs x23,x24,x15 umulh x15,x7,x28 adcs x24,x25,x16 umulh x16,x8,x28 adcs x25,x26,x17 umulh x17,x9,x28 adc x26,xzr,xzr adds x19,x19,x14 umulh x14,x10,x28 adcs x20,x20,x15 umulh x15,x11,x28 adcs x21,x21,x16 umulh x16,x12,x28 adcs x22,x22,x17 umulh x17,x13,x28 mul x28,x4,x19 // next t[0]*n0 adcs x23,x23,x14 adcs x24,x24,x15 adcs x25,x25,x16 adc x26,x26,x17 cbnz x27,.Lsqr8x_reduction ldp x14,x15,[x2,#8*0] ldp x16,x17,[x2,#8*2] mov x0,x2 sub x27,x3,x1 // done yet? 
adds x19,x19,x14 adcs x20,x20,x15 ldp x14,x15,[x2,#8*4] adcs x21,x21,x16 adcs x22,x22,x17 ldp x16,x17,[x2,#8*6] adcs x23,x23,x14 adcs x24,x24,x15 adcs x25,x25,x16 adcs x26,x26,x17 //adc x28,xzr,xzr // moved below cbz x27,.Lsqr8x8_post_condition ldr x4,[x2,#-8*8] ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] ldp x10,x11,[x1,#8*4] mov x27,#-8*8 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 .Lsqr8x_tail: mul x14,x6,x4 adc x28,xzr,xzr // carry bit, modulo-scheduled mul x15,x7,x4 add x27,x27,#8 mul x16,x8,x4 mul x17,x9,x4 adds x19,x19,x14 mul x14,x10,x4 adcs x20,x20,x15 mul x15,x11,x4 adcs x21,x21,x16 mul x16,x12,x4 adcs x22,x22,x17 mul x17,x13,x4 adcs x23,x23,x14 umulh x14,x6,x4 adcs x24,x24,x15 umulh x15,x7,x4 adcs x25,x25,x16 umulh x16,x8,x4 adcs x26,x26,x17 umulh x17,x9,x4 adc x28,x28,xzr str x19,[x2],#8 adds x19,x20,x14 umulh x14,x10,x4 adcs x20,x21,x15 umulh x15,x11,x4 adcs x21,x22,x16 umulh x16,x12,x4 adcs x22,x23,x17 umulh x17,x13,x4 ldr x4,[x0,x27] adcs x23,x24,x14 adcs x24,x25,x15 adcs x25,x26,x16 adcs x26,x28,x17 //adc x28,xzr,xzr // moved above cbnz x27,.Lsqr8x_tail // note that carry flag is guaranteed // to be zero at this point ldp x6,x7,[x2,#8*0] sub x27,x3,x1 // done yet? sub x16,x3,x5 // rewinded np ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] cbz x27,.Lsqr8x_tail_break ldr x4,[x0,#-8*8] adds x19,x19,x6 adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x27,#-8*8 adcs x26,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved above b .Lsqr8x_tail .align 4 .Lsqr8x_tail_break: ldr x4,[x29,#112] // pull n0 add x27,x2,#8*8 // end of current t[num] window subs xzr,x30,#1 // "move" top-most carry to carry bit adcs x14,x19,x6 adcs x15,x20,x7 ldp x19,x20,[x0,#8*0] adcs x21,x21,x8 ldp x6,x7,[x16,#8*0] // recall that x16 is &n[0] adcs x22,x22,x9 ldp x8,x9,[x16,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x16,#8*4] adcs x25,x25,x12 adcs x26,x26,x13 ldp x12,x13,[x16,#8*6] add x1,x16,#8*8 adc x30,xzr,xzr // top-most carry mul x28,x4,x19 stp x14,x15,[x2,#8*0] stp x21,x22,[x2,#8*2] ldp x21,x22,[x0,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[x0,#8*4] cmp x27,x29 // did we hit the bottom? stp x25,x26,[x2,#8*6] mov x2,x0 // slide the window ldp x25,x26,[x0,#8*6] mov x27,#8 b.ne .Lsqr8x_reduction // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. ldr x0,[x29,#96] // pull rp add x2,x2,#8*8 subs x14,x19,x6 sbcs x15,x20,x7 sub x27,x5,#8*8 mov x3,x0 // x0 copy .Lsqr8x_sub: sbcs x16,x21,x8 ldp x6,x7,[x1,#8*0] sbcs x17,x22,x9 stp x14,x15,[x0,#8*0] sbcs x14,x23,x10 ldp x8,x9,[x1,#8*2] sbcs x15,x24,x11 stp x16,x17,[x0,#8*2] sbcs x16,x25,x12 ldp x10,x11,[x1,#8*4] sbcs x17,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 ldp x19,x20,[x2,#8*0] sub x27,x27,#8*8 ldp x21,x22,[x2,#8*2] ldp x23,x24,[x2,#8*4] ldp x25,x26,[x2,#8*6] add x2,x2,#8*8 stp x14,x15,[x0,#8*4] sbcs x14,x19,x6 stp x16,x17,[x0,#8*6] add x0,x0,#8*8 sbcs x15,x20,x7 cbnz x27,.Lsqr8x_sub sbcs x16,x21,x8 mov x2,sp add x1,sp,x5 ldp x6,x7,[x3,#8*0] sbcs x17,x22,x9 stp x14,x15,[x0,#8*0] sbcs x14,x23,x10 ldp x8,x9,[x3,#8*2] sbcs x15,x24,x11 stp x16,x17,[x0,#8*2] sbcs x16,x25,x12 ldp x19,x20,[x1,#8*0] sbcs x17,x26,x13 ldp x21,x22,[x1,#8*2] sbcs xzr,x30,xzr // did it borrow? 
ldr x30,[x29,#8] // pull return address stp x14,x15,[x0,#8*4] stp x16,x17,[x0,#8*6] sub x27,x5,#8*4 .Lsqr4x_cond_copy: sub x27,x27,#8*4 csel x14,x19,x6,lo stp xzr,xzr,[x2,#8*0] csel x15,x20,x7,lo ldp x6,x7,[x3,#8*4] ldp x19,x20,[x1,#8*4] csel x16,x21,x8,lo stp xzr,xzr,[x2,#8*2] add x2,x2,#8*4 csel x17,x22,x9,lo ldp x8,x9,[x3,#8*6] ldp x21,x22,[x1,#8*6] add x1,x1,#8*4 stp x14,x15,[x3,#8*0] stp x16,x17,[x3,#8*2] add x3,x3,#8*4 stp xzr,xzr,[x1,#8*0] stp xzr,xzr,[x1,#8*2] cbnz x27,.Lsqr4x_cond_copy csel x14,x19,x6,lo stp xzr,xzr,[x2,#8*0] csel x15,x20,x7,lo stp xzr,xzr,[x2,#8*2] csel x16,x21,x8,lo csel x17,x22,x9,lo stp x14,x15,[x3,#8*0] stp x16,x17,[x3,#8*2] b .Lsqr8x_done .align 4 .Lsqr8x8_post_condition: adc x28,xzr,xzr ldr x30,[x29,#8] // pull return address // x19-7,x28 hold result, x6-7 hold modulus subs x6,x19,x6 ldr x1,[x29,#96] // pull rp sbcs x7,x20,x7 stp xzr,xzr,[sp,#8*0] sbcs x8,x21,x8 stp xzr,xzr,[sp,#8*2] sbcs x9,x22,x9 stp xzr,xzr,[sp,#8*4] sbcs x10,x23,x10 stp xzr,xzr,[sp,#8*6] sbcs x11,x24,x11 stp xzr,xzr,[sp,#8*8] sbcs x12,x25,x12 stp xzr,xzr,[sp,#8*10] sbcs x13,x26,x13 stp xzr,xzr,[sp,#8*12] sbcs x28,x28,xzr // did it borrow? stp xzr,xzr,[sp,#8*14] // x6-7 hold result-modulus csel x6,x19,x6,lo csel x7,x20,x7,lo csel x8,x21,x8,lo csel x9,x22,x9,lo stp x6,x7,[x1,#8*0] csel x10,x23,x10,lo csel x11,x24,x11,lo stp x8,x9,[x1,#8*2] csel x12,x25,x12,lo csel x13,x26,x13,lo stp x10,x11,[x1,#8*4] stp x12,x13,[x1,#8*6] .Lsqr8x_done: ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldr x29,[sp],#128 // x30 is popped earlier AARCH64_VALIDATE_LINK_REGISTER ret .size __bn_sqr8x_mont,.-__bn_sqr8x_mont .type __bn_mul4x_mont,%function .align 5 __bn_mul4x_mont: // Not adding AARCH64_SIGN_LINK_REGISTER here because __bn_mul4x_mont is jumped to // only from bn_mul_mont or __bn_mul8x_mont which have already signed the // return address. stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub x26,sp,x5,lsl#3 lsl x5,x5,#3 ldr x4,[x4] // *n0 sub sp,x26,#8*4 // alloca add x10,x2,x5 add x27,x1,x5 stp x0,x10,[x29,#96] // offload rp and &b[num] ldr x24,[x2,#8*0] // b[0] ldp x6,x7,[x1,#8*0] // a[0..3] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 mov x19,xzr mov x20,xzr mov x21,xzr mov x22,xzr ldp x14,x15,[x3,#8*0] // n[0..3] ldp x16,x17,[x3,#8*2] adds x3,x3,#8*4 // clear carry bit mov x0,xzr mov x28,#0 mov x26,sp .Loop_mul4x_1st_reduction: mul x10,x6,x24 // lo(a[0..3]*b[0]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[0..3]*b[0]) adcs x20,x20,x11 mul x25,x19,x4 // t[0]*n0 adcs x21,x21,x12 umulh x11,x7,x24 adcs x22,x22,x13 umulh x12,x8,x24 adc x23,xzr,xzr umulh x13,x9,x24 ldr x24,[x2,x28] // next b[i] (or b[0]) adds x20,x20,x10 // (*) mul x10,x14,x25 // lo(n[0..3]*t[0]*n0) str x25,[x26],#8 // put aside t[0]*n0 for tail processing adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 // (*) adds xzr,x19,x10 subs xzr,x19,#1 // (*) umulh x10,x14,x25 // hi(n[0..3]*t[0]*n0) adcs x19,x20,x11 umulh x11,x15,x25 adcs x20,x21,x12 umulh x12,x16,x25 adcs x21,x22,x13 umulh x13,x17,x25 adcs x22,x23,x0 adc x0,xzr,xzr adds x19,x19,x10 sub x10,x27,x1 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr cbnz x28,.Loop_mul4x_1st_reduction cbz x10,.Lmul4x4_post_condition ldp x6,x7,[x1,#8*0] // a[4..7] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 ldr x25,[sp] // a[0]*n0 ldp x14,x15,[x3,#8*0] // n[4..7] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 .Loop_mul4x_1st_tail: mul x10,x6,x24 // lo(a[4..7]*b[i]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[4..7]*b[i]) adcs x20,x20,x11 umulh x11,x7,x24 adcs x21,x21,x12 umulh x12,x8,x24 adcs x22,x22,x13 umulh x13,x9,x24 adc x23,xzr,xzr ldr x24,[x2,x28] // next b[i] (or b[0]) adds x20,x20,x10 mul x10,x14,x25 // lo(n[4..7]*a[0]*n0) adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 adds x19,x19,x10 umulh x10,x14,x25 // hi(n[4..7]*a[0]*n0) adcs x20,x20,x11 umulh x11,x15,x25 adcs x21,x21,x12 umulh x12,x16,x25 adcs x22,x22,x13 adcs x23,x23,x0 umulh x13,x17,x25 adc x0,xzr,xzr ldr x25,[sp,x28] // next t[0]*n0 str x19,[x26],#8 // result!!! adds x19,x20,x10 sub x10,x27,x1 // done yet? adcs x20,x21,x11 adcs x21,x22,x12 adcs x22,x23,x13 //adc x0,x0,xzr cbnz x28,.Loop_mul4x_1st_tail sub x11,x27,x5 // rewinded x1 cbz x10,.Lmul4x_proceed ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 ldp x14,x15,[x3,#8*0] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 b .Loop_mul4x_1st_tail .align 5 .Lmul4x_proceed: ldr x24,[x2,#8*4]! // *++b adc x30,x0,xzr ldp x6,x7,[x11,#8*0] // a[0..3] sub x3,x3,x5 // rewind np ldp x8,x9,[x11,#8*2] add x1,x11,#8*4 stp x19,x20,[x26,#8*0] // result!!! ldp x19,x20,[sp,#8*4] // t[0..3] stp x21,x22,[x26,#8*2] // result!!! 
ldp x21,x22,[sp,#8*6] ldp x14,x15,[x3,#8*0] // n[0..3] mov x26,sp ldp x16,x17,[x3,#8*2] adds x3,x3,#8*4 // clear carry bit mov x0,xzr .align 4 .Loop_mul4x_reduction: mul x10,x6,x24 // lo(a[0..3]*b[4]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[0..3]*b[4]) adcs x20,x20,x11 mul x25,x19,x4 // t[0]*n0 adcs x21,x21,x12 umulh x11,x7,x24 adcs x22,x22,x13 umulh x12,x8,x24 adc x23,xzr,xzr umulh x13,x9,x24 ldr x24,[x2,x28] // next b[i] adds x20,x20,x10 // (*) mul x10,x14,x25 str x25,[x26],#8 // put aside t[0]*n0 for tail processing adcs x21,x21,x11 mul x11,x15,x25 // lo(n[0..3]*t[0]*n0 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 // (*) adds xzr,x19,x10 subs xzr,x19,#1 // (*) umulh x10,x14,x25 // hi(n[0..3]*t[0]*n0 adcs x19,x20,x11 umulh x11,x15,x25 adcs x20,x21,x12 umulh x12,x16,x25 adcs x21,x22,x13 umulh x13,x17,x25 adcs x22,x23,x0 adc x0,xzr,xzr adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr cbnz x28,.Loop_mul4x_reduction adc x0,x0,xzr ldp x10,x11,[x26,#8*4] // t[4..7] ldp x12,x13,[x26,#8*6] ldp x6,x7,[x1,#8*0] // a[4..7] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr ldr x25,[sp] // t[0]*n0 ldp x14,x15,[x3,#8*0] // n[4..7] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 .align 4 .Loop_mul4x_tail: mul x10,x6,x24 // lo(a[4..7]*b[4]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[4..7]*b[4]) adcs x20,x20,x11 umulh x11,x7,x24 adcs x21,x21,x12 umulh x12,x8,x24 adcs x22,x22,x13 umulh x13,x9,x24 adc x23,xzr,xzr ldr x24,[x2,x28] // next b[i] adds x20,x20,x10 mul x10,x14,x25 // lo(n[4..7]*t[0]*n0) adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 adds x19,x19,x10 umulh x10,x14,x25 // hi(n[4..7]*t[0]*n0) adcs x20,x20,x11 umulh x11,x15,x25 adcs x21,x21,x12 umulh x12,x16,x25 adcs x22,x22,x13 umulh x13,x17,x25 adcs x23,x23,x0 ldr x25,[sp,x28] // next a[0]*n0 adc x0,xzr,xzr str x19,[x26],#8 // result!!! adds x19,x20,x10 sub x10,x27,x1 // done yet? adcs x20,x21,x11 adcs x21,x22,x12 adcs x22,x23,x13 //adc x0,x0,xzr cbnz x28,.Loop_mul4x_tail sub x11,x3,x5 // rewinded np? adc x0,x0,xzr cbz x10,.Loop_mul4x_break ldp x10,x11,[x26,#8*4] ldp x12,x13,[x26,#8*6] ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr ldp x14,x15,[x3,#8*0] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 b .Loop_mul4x_tail .align 4 .Loop_mul4x_break: ldp x12,x13,[x29,#96] // pull rp and &b[num] adds x19,x19,x30 add x2,x2,#8*4 // bp++ adcs x20,x20,xzr sub x1,x1,x5 // rewind ap adcs x21,x21,xzr stp x19,x20,[x26,#8*0] // result!!! adcs x22,x22,xzr ldp x19,x20,[sp,#8*4] // t[0..3] adc x30,x0,xzr stp x21,x22,[x26,#8*2] // result!!! cmp x2,x13 // done yet? ldp x21,x22,[sp,#8*6] ldp x14,x15,[x11,#8*0] // n[0..3] ldp x16,x17,[x11,#8*2] add x3,x11,#8*4 b.eq .Lmul4x_post ldr x24,[x2] ldp x6,x7,[x1,#8*0] // a[0..3] ldp x8,x9,[x1,#8*2] adds x1,x1,#8*4 // clear carry bit mov x0,xzr mov x26,sp b .Loop_mul4x_reduction .align 4 .Lmul4x_post: // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. 
mov x0,x12 mov x27,x12 // x0 copy subs x10,x19,x14 add x26,sp,#8*8 sbcs x11,x20,x15 sub x28,x5,#8*4 .Lmul4x_sub: sbcs x12,x21,x16 ldp x14,x15,[x3,#8*0] sub x28,x28,#8*4 ldp x19,x20,[x26,#8*0] sbcs x13,x22,x17 ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 ldp x21,x22,[x26,#8*2] add x26,x26,#8*4 stp x10,x11,[x0,#8*0] sbcs x10,x19,x14 stp x12,x13,[x0,#8*2] add x0,x0,#8*4 sbcs x11,x20,x15 cbnz x28,.Lmul4x_sub sbcs x12,x21,x16 mov x26,sp add x1,sp,#8*4 ldp x6,x7,[x27,#8*0] sbcs x13,x22,x17 stp x10,x11,[x0,#8*0] ldp x8,x9,[x27,#8*2] stp x12,x13,[x0,#8*2] ldp x19,x20,[x1,#8*0] ldp x21,x22,[x1,#8*2] sbcs xzr,x30,xzr // did it borrow? ldr x30,[x29,#8] // pull return address sub x28,x5,#8*4 .Lmul4x_cond_copy: sub x28,x28,#8*4 csel x10,x19,x6,lo stp xzr,xzr,[x26,#8*0] csel x11,x20,x7,lo ldp x6,x7,[x27,#8*4] ldp x19,x20,[x1,#8*4] csel x12,x21,x8,lo stp xzr,xzr,[x26,#8*2] add x26,x26,#8*4 csel x13,x22,x9,lo ldp x8,x9,[x27,#8*6] ldp x21,x22,[x1,#8*6] add x1,x1,#8*4 stp x10,x11,[x27,#8*0] stp x12,x13,[x27,#8*2] add x27,x27,#8*4 cbnz x28,.Lmul4x_cond_copy csel x10,x19,x6,lo stp xzr,xzr,[x26,#8*0] csel x11,x20,x7,lo stp xzr,xzr,[x26,#8*2] csel x12,x21,x8,lo stp xzr,xzr,[x26,#8*3] csel x13,x22,x9,lo stp xzr,xzr,[x26,#8*4] stp x10,x11,[x27,#8*0] stp x12,x13,[x27,#8*2] b .Lmul4x_done .align 4 .Lmul4x4_post_condition: adc x0,x0,xzr ldr x1,[x29,#96] // pull rp // x19-3,x0 hold result, x14-7 hold modulus subs x6,x19,x14 ldr x30,[x29,#8] // pull return address sbcs x7,x20,x15 stp xzr,xzr,[sp,#8*0] sbcs x8,x21,x16 stp xzr,xzr,[sp,#8*2] sbcs x9,x22,x17 stp xzr,xzr,[sp,#8*4] sbcs xzr,x0,xzr // did it borrow? stp xzr,xzr,[sp,#8*6] // x6-3 hold result-modulus csel x6,x19,x6,lo csel x7,x20,x7,lo csel x8,x21,x8,lo csel x9,x22,x9,lo stp x6,x7,[x1,#8*0] stp x8,x9,[x1,#8*2] .Lmul4x_done: ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldr x29,[sp],#128 // x30 is popped earlier AARCH64_VALIDATE_LINK_REGISTER ret .size __bn_mul4x_mont,.-__bn_mul4x_mont .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 4 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
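All three Montgomery paths in this file end with the "final step" the comments describe: the reduced value t is below 2n (the standard Montgomery invariant when the inputs are below n), so the code subtracts n once, checks whether that borrowed past the top carry limb, and then writes back either t or t - n using SBCS for the subtraction and CSEL for the copy, so no data-dependent branch is taken. A minimal C sketch of that conditional subtraction follows; the names, the top_carry parameter, and the mask-based select standing in for CSEL are illustrative assumptions, not AWS-LC's interface.

```c
/*
 * Sketch of the final conditional subtraction: tp holds the num-limb
 * reduced value, top_carry its (num+1)-th limb (0 or 1), np the modulus.
 * rp must not alias tp.
 */
#include <stddef.h>
#include <stdint.h>

typedef uint64_t BN_ULONG;

static void mont_final_sub(BN_ULONG *rp, const BN_ULONG *tp, BN_ULONG top_carry,
                           const BN_ULONG *np, size_t num) {
    BN_ULONG borrow = 0;
    for (size_t i = 0; i < num; i++) {  /* rp = tp - np, tracking the borrow */
        BN_ULONG d = tp[i] - np[i] - borrow;
        borrow = (tp[i] < np[i]) | ((tp[i] == np[i]) & borrow);
        rp[i] = d;
    }
    /* t is below n exactly when the subtraction borrows past the top limb. */
    BN_ULONG keep_t = (BN_ULONG)0 - (BN_ULONG)(top_carry < borrow);
    for (size_t i = 0; i < num; i++)
        rp[i] ^= keep_t & (rp[i] ^ tp[i]);  /* select tp[i] when t < n */
}
```

The stp xzr,xzr stores in the cond-copy loops above are a separate concern: they wipe the stack scratch area holding t before the function returns.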
marvin-hansen/iggy-streaming-system
40,450
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/chacha/chacha-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .section .rodata .align 5 .Lsigma: .quad 0x3320646e61707865,0x6b20657479622d32 // endian-neutral .Lone: .long 1,0,0,0 .byte 67,104,97,67,104,97,50,48,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .text .globl ChaCha20_ctr32_nohw .hidden ChaCha20_ctr32_nohw .type ChaCha20_ctr32_nohw,%function .align 5 ChaCha20_ctr32_nohw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,.Lsigma add x5,x5,:lo12:.Lsigma stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#64 ldp x22,x23,[x5] // load sigma ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ldp x28,x30,[x4] // load counter #ifdef __AARCH64EB__ ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif .Loop_outer: mov w5,w22 // unpack key block lsr x6,x22,#32 mov w7,w23 lsr x8,x23,#32 mov w9,w24 lsr x10,x24,#32 mov w11,w25 lsr x12,x25,#32 mov w13,w26 lsr x14,x26,#32 mov w15,w27 lsr x16,x27,#32 mov w17,w28 lsr x19,x28,#32 mov w20,w30 lsr x21,x30,#32 mov x4,#10 subs x2,x2,#64 .Loop: sub x4,x4,#1 add w5,w5,w9 add w6,w6,w10 add w7,w7,w11 add w8,w8,w12 eor w17,w17,w5 eor w19,w19,w6 eor w20,w20,w7 eor w21,w21,w8 ror w17,w17,#16 ror w19,w19,#16 ror w20,w20,#16 ror w21,w21,#16 add w13,w13,w17 add w14,w14,w19 add w15,w15,w20 add w16,w16,w21 eor w9,w9,w13 eor w10,w10,w14 eor w11,w11,w15 eor w12,w12,w16 ror w9,w9,#20 ror w10,w10,#20 ror w11,w11,#20 ror w12,w12,#20 add w5,w5,w9 add w6,w6,w10 add w7,w7,w11 add w8,w8,w12 eor w17,w17,w5 eor w19,w19,w6 eor w20,w20,w7 eor w21,w21,w8 ror w17,w17,#24 ror w19,w19,#24 ror w20,w20,#24 ror w21,w21,#24 add w13,w13,w17 add w14,w14,w19 add w15,w15,w20 add w16,w16,w21 eor w9,w9,w13 eor w10,w10,w14 eor w11,w11,w15 eor w12,w12,w16 ror w9,w9,#25 ror w10,w10,#25 ror w11,w11,#25 ror w12,w12,#25 add w5,w5,w10 add w6,w6,w11 add w7,w7,w12 add w8,w8,w9 eor w21,w21,w5 eor w17,w17,w6 eor w19,w19,w7 eor w20,w20,w8 ror w21,w21,#16 ror w17,w17,#16 ror w19,w19,#16 ror w20,w20,#16 add w15,w15,w21 add w16,w16,w17 add w13,w13,w19 add w14,w14,w20 eor w10,w10,w15 eor w11,w11,w16 eor w12,w12,w13 eor w9,w9,w14 ror w10,w10,#20 ror w11,w11,#20 ror w12,w12,#20 ror w9,w9,#20 add w5,w5,w10 add w6,w6,w11 add w7,w7,w12 add w8,w8,w9 eor w21,w21,w5 eor w17,w17,w6 eor w19,w19,w7 eor w20,w20,w8 ror w21,w21,#24 ror w17,w17,#24 ror w19,w19,#24 ror w20,w20,#24 add w15,w15,w21 add w16,w16,w17 add w13,w13,w19 add w14,w14,w20 eor w10,w10,w15 eor w11,w11,w16 eor w12,w12,w13 eor w9,w9,w14 ror w10,w10,#25 ror w11,w11,#25 ror w12,w12,#25 ror w9,w9,#25 cbnz x4,.Loop add w5,w5,w22 // accumulate key block add x6,x6,x22,lsr#32 add w7,w7,w23 add x8,x8,x23,lsr#32 add w9,w9,w24 add x10,x10,x24,lsr#32 add w11,w11,w25 add x12,x12,x25,lsr#32 add w13,w13,w26 add x14,x14,x26,lsr#32 add w15,w15,w27 add x16,x16,x27,lsr#32 add w17,w17,w28 add x19,x19,x28,lsr#32 add w20,w20,w30 add x21,x21,x30,lsr#32 b.lo .Ltail add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] 
add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#1 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 b.hi .Loop_outer ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .align 4 .Ltail: add x2,x2,#64 .Less_than_64: sub x0,x0,#1 add x1,x1,x2 add x0,x0,x2 add x4,sp,x2 neg x2,x2 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif stp x5,x7,[sp,#0] stp x9,x11,[sp,#16] stp x13,x15,[sp,#32] stp x17,x20,[sp,#48] .Loop_tail: ldrb w10,[x1,x2] ldrb w11,[x4,x2] add x2,x2,#1 eor w10,w10,w11 strb w10,[x0,x2] cbnz x2,.Loop_tail stp xzr,xzr,[sp,#0] stp xzr,xzr,[sp,#16] stp xzr,xzr,[sp,#32] stp xzr,xzr,[sp,#48] ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .size ChaCha20_ctr32_nohw,.-ChaCha20_ctr32_nohw .globl ChaCha20_ctr32_neon .hidden ChaCha20_ctr32_neon .type ChaCha20_ctr32_neon,%function .align 5 ChaCha20_ctr32_neon: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,.Lsigma add x5,x5,:lo12:.Lsigma stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] cmp x2,#512 b.hs .L512_or_more_neon sub sp,sp,#64 ldp x22,x23,[x5] // load sigma ld1 {v24.4s},[x5],#16 ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ld1 {v25.4s,v26.4s},[x3] ldp x28,x30,[x4] // load counter ld1 {v27.4s},[x4] ld1 {v31.4s},[x5] #ifdef __AARCH64EB__ rev64 v24.4s,v24.4s ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif add v27.4s,v27.4s,v31.4s // += 1 add v28.4s,v27.4s,v31.4s add v29.4s,v28.4s,v31.4s shl v31.4s,v31.4s,#2 // 1 -> 4 .Loop_outer_neon: mov w5,w22 // unpack key block lsr x6,x22,#32 mov v0.16b,v24.16b mov w7,w23 lsr x8,x23,#32 mov v4.16b,v24.16b mov w9,w24 lsr x10,x24,#32 mov v16.16b,v24.16b mov w11,w25 mov v1.16b,v25.16b lsr x12,x25,#32 mov v5.16b,v25.16b mov w13,w26 mov v17.16b,v25.16b lsr x14,x26,#32 mov v3.16b,v27.16b mov w15,w27 mov v7.16b,v28.16b lsr x16,x27,#32 mov v19.16b,v29.16b mov w17,w28 mov v2.16b,v26.16b lsr x19,x28,#32 mov v6.16b,v26.16b mov w20,w30 mov v18.16b,v26.16b lsr x21,x30,#32 mov x4,#10 subs x2,x2,#256 .Loop_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v16.4s,v16.4s,v17.4s add w7,w7,w11 eor v3.16b,v3.16b,v0.16b add w8,w8,w12 eor v7.16b,v7.16b,v4.16b eor w17,w17,w5 eor v19.16b,v19.16b,v16.16b eor w19,w19,w6 rev32 v3.8h,v3.8h eor w20,w20,w7 rev32 v7.8h,v7.8h eor w21,w21,w8 rev32 v19.8h,v19.8h ror w17,w17,#16 add v2.4s,v2.4s,v3.4s ror w19,w19,#16 add v6.4s,v6.4s,v7.4s ror w20,w20,#16 add v18.4s,v18.4s,v19.4s ror w21,w21,#16 eor v20.16b,v1.16b,v2.16b add w13,w13,w17 eor v21.16b,v5.16b,v6.16b add w14,w14,w19 eor v22.16b,v17.16b,v18.16b add w15,w15,w20 ushr v1.4s,v20.4s,#20 add w16,w16,w21 ushr v5.4s,v21.4s,#20 eor w9,w9,w13 ushr 
v17.4s,v22.4s,#20 eor w10,w10,w14 sli v1.4s,v20.4s,#12 eor w11,w11,w15 sli v5.4s,v21.4s,#12 eor w12,w12,w16 sli v17.4s,v22.4s,#12 ror w9,w9,#20 add v0.4s,v0.4s,v1.4s ror w10,w10,#20 add v4.4s,v4.4s,v5.4s ror w11,w11,#20 add v16.4s,v16.4s,v17.4s ror w12,w12,#20 eor v20.16b,v3.16b,v0.16b add w5,w5,w9 eor v21.16b,v7.16b,v4.16b add w6,w6,w10 eor v22.16b,v19.16b,v16.16b add w7,w7,w11 ushr v3.4s,v20.4s,#24 add w8,w8,w12 ushr v7.4s,v21.4s,#24 eor w17,w17,w5 ushr v19.4s,v22.4s,#24 eor w19,w19,w6 sli v3.4s,v20.4s,#8 eor w20,w20,w7 sli v7.4s,v21.4s,#8 eor w21,w21,w8 sli v19.4s,v22.4s,#8 ror w17,w17,#24 add v2.4s,v2.4s,v3.4s ror w19,w19,#24 add v6.4s,v6.4s,v7.4s ror w20,w20,#24 add v18.4s,v18.4s,v19.4s ror w21,w21,#24 eor v20.16b,v1.16b,v2.16b add w13,w13,w17 eor v21.16b,v5.16b,v6.16b add w14,w14,w19 eor v22.16b,v17.16b,v18.16b add w15,w15,w20 ushr v1.4s,v20.4s,#25 add w16,w16,w21 ushr v5.4s,v21.4s,#25 eor w9,w9,w13 ushr v17.4s,v22.4s,#25 eor w10,w10,w14 sli v1.4s,v20.4s,#7 eor w11,w11,w15 sli v5.4s,v21.4s,#7 eor w12,w12,w16 sli v17.4s,v22.4s,#7 ror w9,w9,#25 ext v2.16b,v2.16b,v2.16b,#8 ror w10,w10,#25 ext v6.16b,v6.16b,v6.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w10 add v4.4s,v4.4s,v5.4s add w6,w6,w11 add v16.4s,v16.4s,v17.4s add w7,w7,w12 eor v3.16b,v3.16b,v0.16b add w8,w8,w9 eor v7.16b,v7.16b,v4.16b eor w21,w21,w5 eor v19.16b,v19.16b,v16.16b eor w17,w17,w6 rev32 v3.8h,v3.8h eor w19,w19,w7 rev32 v7.8h,v7.8h eor w20,w20,w8 rev32 v19.8h,v19.8h ror w21,w21,#16 add v2.4s,v2.4s,v3.4s ror w17,w17,#16 add v6.4s,v6.4s,v7.4s ror w19,w19,#16 add v18.4s,v18.4s,v19.4s ror w20,w20,#16 eor v20.16b,v1.16b,v2.16b add w15,w15,w21 eor v21.16b,v5.16b,v6.16b add w16,w16,w17 eor v22.16b,v17.16b,v18.16b add w13,w13,w19 ushr v1.4s,v20.4s,#20 add w14,w14,w20 ushr v5.4s,v21.4s,#20 eor w10,w10,w15 ushr v17.4s,v22.4s,#20 eor w11,w11,w16 sli v1.4s,v20.4s,#12 eor w12,w12,w13 sli v5.4s,v21.4s,#12 eor w9,w9,w14 sli v17.4s,v22.4s,#12 ror w10,w10,#20 add v0.4s,v0.4s,v1.4s ror w11,w11,#20 add v4.4s,v4.4s,v5.4s ror w12,w12,#20 add v16.4s,v16.4s,v17.4s ror w9,w9,#20 eor v20.16b,v3.16b,v0.16b add w5,w5,w10 eor v21.16b,v7.16b,v4.16b add w6,w6,w11 eor v22.16b,v19.16b,v16.16b add w7,w7,w12 ushr v3.4s,v20.4s,#24 add w8,w8,w9 ushr v7.4s,v21.4s,#24 eor w21,w21,w5 ushr v19.4s,v22.4s,#24 eor w17,w17,w6 sli v3.4s,v20.4s,#8 eor w19,w19,w7 sli v7.4s,v21.4s,#8 eor w20,w20,w8 sli v19.4s,v22.4s,#8 ror w21,w21,#24 add v2.4s,v2.4s,v3.4s ror w17,w17,#24 add v6.4s,v6.4s,v7.4s ror w19,w19,#24 add v18.4s,v18.4s,v19.4s ror w20,w20,#24 eor v20.16b,v1.16b,v2.16b add w15,w15,w21 eor v21.16b,v5.16b,v6.16b add w16,w16,w17 eor v22.16b,v17.16b,v18.16b add w13,w13,w19 ushr v1.4s,v20.4s,#25 add w14,w14,w20 ushr v5.4s,v21.4s,#25 eor w10,w10,w15 ushr v17.4s,v22.4s,#25 eor w11,w11,w16 sli v1.4s,v20.4s,#7 eor w12,w12,w13 sli v5.4s,v21.4s,#7 eor w9,w9,w14 sli v17.4s,v22.4s,#7 ror w10,w10,#25 ext v2.16b,v2.16b,v2.16b,#8 ror w11,w11,#25 ext v6.16b,v6.16b,v6.16b,#8 ror w12,w12,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v17.16b,v17.16b,v17.16b,#12 cbnz x4,.Loop_neon add w5,w5,w22 // accumulate key block add v0.4s,v0.4s,v24.4s add x6,x6,x22,lsr#32 add v4.4s,v4.4s,v24.4s add w7,w7,w23 add 
v16.4s,v16.4s,v24.4s add x8,x8,x23,lsr#32 add v2.4s,v2.4s,v26.4s add w9,w9,w24 add v6.4s,v6.4s,v26.4s add x10,x10,x24,lsr#32 add v18.4s,v18.4s,v26.4s add w11,w11,w25 add v3.4s,v3.4s,v27.4s add x12,x12,x25,lsr#32 add w13,w13,w26 add v7.4s,v7.4s,v28.4s add x14,x14,x26,lsr#32 add w15,w15,w27 add v19.4s,v19.4s,v29.4s add x16,x16,x27,lsr#32 add w17,w17,w28 add v1.4s,v1.4s,v25.4s add x19,x19,x28,lsr#32 add w20,w20,w30 add v5.4s,v5.4s,v25.4s add x21,x21,x30,lsr#32 add v17.4s,v17.4s,v25.4s b.lo .Ltail_neon add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor v0.16b,v0.16b,v20.16b eor x15,x15,x16 eor v1.16b,v1.16b,v21.16b eor x17,x17,x19 eor v2.16b,v2.16b,v22.16b eor x20,x20,x21 eor v3.16b,v3.16b,v23.16b ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 stp x5,x7,[x0,#0] // store output add x28,x28,#4 // increment counter stp x9,x11,[x0,#16] add v27.4s,v27.4s,v31.4s // += 4 stp x13,x15,[x0,#32] add v28.4s,v28.4s,v31.4s stp x17,x20,[x0,#48] add v29.4s,v29.4s,v31.4s add x0,x0,#64 st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 ld1 {v0.16b,v1.16b,v2.16b,v3.16b},[x1],#64 eor v4.16b,v4.16b,v20.16b eor v5.16b,v5.16b,v21.16b eor v6.16b,v6.16b,v22.16b eor v7.16b,v7.16b,v23.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 eor v16.16b,v16.16b,v0.16b eor v17.16b,v17.16b,v1.16b eor v18.16b,v18.16b,v2.16b eor v19.16b,v19.16b,v3.16b st1 {v16.16b,v17.16b,v18.16b,v19.16b},[x0],#64 b.hi .Loop_outer_neon ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .Ltail_neon: add x2,x2,#256 cmp x2,#64 b.lo .Less_than_64 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#4 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 b.eq .Ldone_neon sub x2,x2,#64 cmp x2,#64 b.lo .Less_than_128 ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor v0.16b,v0.16b,v20.16b eor v1.16b,v1.16b,v21.16b eor v2.16b,v2.16b,v22.16b eor v3.16b,v3.16b,v23.16b st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 b.eq .Ldone_neon sub x2,x2,#64 cmp x2,#64 b.lo .Less_than_192 ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor v4.16b,v4.16b,v20.16b eor v5.16b,v5.16b,v21.16b eor v6.16b,v6.16b,v22.16b eor v7.16b,v7.16b,v23.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 b.eq .Ldone_neon sub x2,x2,#64 st1 {v16.16b,v17.16b,v18.16b,v19.16b},[sp] b .Last_neon .Less_than_128: st1 {v0.16b,v1.16b,v2.16b,v3.16b},[sp] b .Last_neon .Less_than_192: st1 {v4.16b,v5.16b,v6.16b,v7.16b},[sp] b .Last_neon .align 4 .Last_neon: sub 
x0,x0,#1 add x1,x1,x2 add x0,x0,x2 add x4,sp,x2 neg x2,x2 .Loop_tail_neon: ldrb w10,[x1,x2] ldrb w11,[x4,x2] add x2,x2,#1 eor w10,w10,w11 strb w10,[x0,x2] cbnz x2,.Loop_tail_neon stp xzr,xzr,[sp,#0] stp xzr,xzr,[sp,#16] stp xzr,xzr,[sp,#32] stp xzr,xzr,[sp,#48] .Ldone_neon: ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .size ChaCha20_ctr32_neon,.-ChaCha20_ctr32_neon .type ChaCha20_512_neon,%function .align 5 ChaCha20_512_neon: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,.Lsigma add x5,x5,:lo12:.Lsigma stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] .L512_or_more_neon: sub sp,sp,#128+64 ldp x22,x23,[x5] // load sigma ld1 {v24.4s},[x5],#16 ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ld1 {v25.4s,v26.4s},[x3] ldp x28,x30,[x4] // load counter ld1 {v27.4s},[x4] ld1 {v31.4s},[x5] #ifdef __AARCH64EB__ rev64 v24.4s,v24.4s ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif add v27.4s,v27.4s,v31.4s // += 1 stp q24,q25,[sp,#0] // off-load key block, invariant part add v27.4s,v27.4s,v31.4s // not typo str q26,[sp,#32] add v28.4s,v27.4s,v31.4s add v29.4s,v28.4s,v31.4s add v30.4s,v29.4s,v31.4s shl v31.4s,v31.4s,#2 // 1 -> 4 stp d8,d9,[sp,#128+0] // meet ABI requirements stp d10,d11,[sp,#128+16] stp d12,d13,[sp,#128+32] stp d14,d15,[sp,#128+48] sub x2,x2,#512 // not typo .Loop_outer_512_neon: mov v0.16b,v24.16b mov v4.16b,v24.16b mov v8.16b,v24.16b mov v12.16b,v24.16b mov v16.16b,v24.16b mov v20.16b,v24.16b mov v1.16b,v25.16b mov w5,w22 // unpack key block mov v5.16b,v25.16b lsr x6,x22,#32 mov v9.16b,v25.16b mov w7,w23 mov v13.16b,v25.16b lsr x8,x23,#32 mov v17.16b,v25.16b mov w9,w24 mov v21.16b,v25.16b lsr x10,x24,#32 mov v3.16b,v27.16b mov w11,w25 mov v7.16b,v28.16b lsr x12,x25,#32 mov v11.16b,v29.16b mov w13,w26 mov v15.16b,v30.16b lsr x14,x26,#32 mov v2.16b,v26.16b mov w15,w27 mov v6.16b,v26.16b lsr x16,x27,#32 add v19.4s,v3.4s,v31.4s // +4 mov w17,w28 add v23.4s,v7.4s,v31.4s // +4 lsr x19,x28,#32 mov v10.16b,v26.16b mov w20,w30 mov v14.16b,v26.16b lsr x21,x30,#32 mov v18.16b,v26.16b stp q27,q28,[sp,#48] // off-load key block, variable part mov v22.16b,v26.16b str q29,[sp,#80] mov x4,#5 subs x2,x2,#512 .Loop_upper_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b 
eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v11.16b,v11.16b,v11.16b,#12 ext v15.16b,v15.16b,v15.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v23.16b,v23.16b,v23.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v9.16b,v9.16b,v9.16b,#4 ext v13.16b,v13.16b,v13.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 ext v21.16b,v21.16b,v21.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add 
w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v11.16b,v11.16b,v11.16b,#4 ext v15.16b,v15.16b,v15.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v23.16b,v23.16b,v23.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v9.16b,v9.16b,v9.16b,#12 ext v13.16b,v13.16b,v13.16b,#12 ext 
v17.16b,v17.16b,v17.16b,#12 ext v21.16b,v21.16b,v21.16b,#12 cbnz x4,.Loop_upper_neon add w5,w5,w22 // accumulate key block add x6,x6,x22,lsr#32 add w7,w7,w23 add x8,x8,x23,lsr#32 add w9,w9,w24 add x10,x10,x24,lsr#32 add w11,w11,w25 add x12,x12,x25,lsr#32 add w13,w13,w26 add x14,x14,x26,lsr#32 add w15,w15,w27 add x16,x16,x27,lsr#32 add w17,w17,w28 add x19,x19,x28,lsr#32 add w20,w20,w30 add x21,x21,x30,lsr#32 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#1 // increment counter mov w5,w22 // unpack key block lsr x6,x22,#32 stp x9,x11,[x0,#16] mov w7,w23 lsr x8,x23,#32 stp x13,x15,[x0,#32] mov w9,w24 lsr x10,x24,#32 stp x17,x20,[x0,#48] add x0,x0,#64 mov w11,w25 lsr x12,x25,#32 mov w13,w26 lsr x14,x26,#32 mov w15,w27 lsr x16,x27,#32 mov w17,w28 lsr x19,x28,#32 mov w20,w30 lsr x21,x30,#32 mov x4,#5 .Loop_lower_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 
ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v11.16b,v11.16b,v11.16b,#12 ext v15.16b,v15.16b,v15.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v23.16b,v23.16b,v23.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v9.16b,v9.16b,v9.16b,#4 ext v13.16b,v13.16b,v13.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 ext v21.16b,v21.16b,v21.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli 
v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v11.16b,v11.16b,v11.16b,#4 ext v15.16b,v15.16b,v15.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v23.16b,v23.16b,v23.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v9.16b,v9.16b,v9.16b,#12 ext v13.16b,v13.16b,v13.16b,#12 ext v17.16b,v17.16b,v17.16b,#12 ext v21.16b,v21.16b,v21.16b,#12 cbnz x4,.Loop_lower_neon add w5,w5,w22 // accumulate key block ldp q24,q25,[sp,#0] add x6,x6,x22,lsr#32 ldp q26,q27,[sp,#32] add w7,w7,w23 ldp q28,q29,[sp,#64] add x8,x8,x23,lsr#32 add v0.4s,v0.4s,v24.4s add w9,w9,w24 add v4.4s,v4.4s,v24.4s add x10,x10,x24,lsr#32 add v8.4s,v8.4s,v24.4s add w11,w11,w25 add v12.4s,v12.4s,v24.4s add x12,x12,x25,lsr#32 add v16.4s,v16.4s,v24.4s add w13,w13,w26 add v20.4s,v20.4s,v24.4s add x14,x14,x26,lsr#32 add v2.4s,v2.4s,v26.4s add w15,w15,w27 add v6.4s,v6.4s,v26.4s add x16,x16,x27,lsr#32 add v10.4s,v10.4s,v26.4s add w17,w17,w28 add v14.4s,v14.4s,v26.4s add x19,x19,x28,lsr#32 add v18.4s,v18.4s,v26.4s add w20,w20,w30 add v22.4s,v22.4s,v26.4s add x21,x21,x30,lsr#32 add v19.4s,v19.4s,v31.4s // +4 add x5,x5,x6,lsl#32 // pack add v23.4s,v23.4s,v31.4s // +4 add x7,x7,x8,lsl#32 add v3.4s,v3.4s,v27.4s ldp x6,x8,[x1,#0] // load input add v7.4s,v7.4s,v28.4s add x9,x9,x10,lsl#32 add v11.4s,v11.4s,v29.4s add x11,x11,x12,lsl#32 add v15.4s,v15.4s,v30.4s ldp x10,x12,[x1,#16] add 
v19.4s,v19.4s,v27.4s add x13,x13,x14,lsl#32 add v23.4s,v23.4s,v28.4s add x15,x15,x16,lsl#32 add v1.4s,v1.4s,v25.4s ldp x14,x16,[x1,#32] add v5.4s,v5.4s,v25.4s add x17,x17,x19,lsl#32 add v9.4s,v9.4s,v25.4s add x20,x20,x21,lsl#32 add v13.4s,v13.4s,v25.4s ldp x19,x21,[x1,#48] add v17.4s,v17.4s,v25.4s add x1,x1,#64 add v21.4s,v21.4s,v25.4s #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif ld1 {v24.16b,v25.16b,v26.16b,v27.16b},[x1],#64 eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor v0.16b,v0.16b,v24.16b eor x15,x15,x16 eor v1.16b,v1.16b,v25.16b eor x17,x17,x19 eor v2.16b,v2.16b,v26.16b eor x20,x20,x21 eor v3.16b,v3.16b,v27.16b ld1 {v24.16b,v25.16b,v26.16b,v27.16b},[x1],#64 stp x5,x7,[x0,#0] // store output add x28,x28,#7 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 ld1 {v0.16b,v1.16b,v2.16b,v3.16b},[x1],#64 eor v4.16b,v4.16b,v24.16b eor v5.16b,v5.16b,v25.16b eor v6.16b,v6.16b,v26.16b eor v7.16b,v7.16b,v27.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 eor v8.16b,v8.16b,v0.16b ldp q24,q25,[sp,#0] eor v9.16b,v9.16b,v1.16b ldp q26,q27,[sp,#32] eor v10.16b,v10.16b,v2.16b eor v11.16b,v11.16b,v3.16b st1 {v8.16b,v9.16b,v10.16b,v11.16b},[x0],#64 ld1 {v8.16b,v9.16b,v10.16b,v11.16b},[x1],#64 eor v12.16b,v12.16b,v4.16b eor v13.16b,v13.16b,v5.16b eor v14.16b,v14.16b,v6.16b eor v15.16b,v15.16b,v7.16b st1 {v12.16b,v13.16b,v14.16b,v15.16b},[x0],#64 ld1 {v12.16b,v13.16b,v14.16b,v15.16b},[x1],#64 eor v16.16b,v16.16b,v8.16b eor v17.16b,v17.16b,v9.16b eor v18.16b,v18.16b,v10.16b eor v19.16b,v19.16b,v11.16b st1 {v16.16b,v17.16b,v18.16b,v19.16b},[x0],#64 shl v0.4s,v31.4s,#1 // 4 -> 8 eor v20.16b,v20.16b,v12.16b eor v21.16b,v21.16b,v13.16b eor v22.16b,v22.16b,v14.16b eor v23.16b,v23.16b,v15.16b st1 {v20.16b,v21.16b,v22.16b,v23.16b},[x0],#64 add v27.4s,v27.4s,v0.4s // += 8 add v28.4s,v28.4s,v0.4s add v29.4s,v29.4s,v0.4s add v30.4s,v30.4s,v0.4s b.hs .Loop_outer_512_neon adds x2,x2,#512 ushr v0.4s,v31.4s,#2 // 4 -> 1 ldp d8,d9,[sp,#128+0] // meet ABI requirements ldp d10,d11,[sp,#128+16] ldp d12,d13,[sp,#128+32] ldp d14,d15,[sp,#128+48] stp q24,q31,[sp,#0] // wipe off-load area stp q24,q31,[sp,#32] stp q24,q31,[sp,#64] b.eq .Ldone_512_neon cmp x2,#192 sub v27.4s,v27.4s,v0.4s // -= 1 sub v28.4s,v28.4s,v0.4s sub v29.4s,v29.4s,v0.4s add sp,sp,#128 b.hs .Loop_outer_neon eor v25.16b,v25.16b,v25.16b eor v26.16b,v26.16b,v26.16b eor v27.16b,v27.16b,v27.16b eor v28.16b,v28.16b,v28.16b eor v29.16b,v29.16b,v29.16b eor v30.16b,v30.16b,v30.16b b .Loop_outer .Ldone_512_neon: ldp x19,x20,[x29,#16] add sp,sp,#128+64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .size ChaCha20_512_neon,.-ChaCha20_512_neon #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
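For reference, a minimal C sketch of the ChaCha20 quarter-round and double-round that the interleaved scalar/NEON code above implements (illustrative only; the generated .S file is the authoritative implementation). In the scalar path the `ror #16/#20/#24/#25` instructions after each `eor` are the 16-, 12-, 8- and 7-bit left rotations; in the NEON path `rev32` and the `ushr`/`sli` pairs play the same role, and the `ext ...,#4/#8/#12` shuffles re-align the rows for the diagonal rounds.

#include <stdint.h>

/* 32-bit rotate-left.  The scalar code uses "ror #(32-n)" after an eor for the
 * same effect; the NEON code uses rev32 (n == 16) or a ushr #(32-n) / sli #n pair. */
static inline uint32_t rotl32(uint32_t v, int n) {
    return (v << n) | (v >> (32 - n));
}

/* One ChaCha20 quarter-round: the add/xor/rotate(16,12,8,7) pattern that the
 * assembly interleaves across several blocks at once. */
static void chacha_quarter_round(uint32_t x[16], int a, int b, int c, int d) {
    x[a] += x[b]; x[d] ^= x[a]; x[d] = rotl32(x[d], 16);
    x[c] += x[d]; x[b] ^= x[c]; x[b] = rotl32(x[b], 12);
    x[a] += x[b]; x[d] ^= x[a]; x[d] = rotl32(x[d], 8);
    x[c] += x[d]; x[b] ^= x[c]; x[b] = rotl32(x[b], 7);
}

/* One double round: four column rounds, then four diagonal rounds.  The ext
 * shuffles in the NEON code are the row rotations that set up the diagonals. */
static void chacha_double_round(uint32_t x[16]) {
    chacha_quarter_round(x, 0, 4,  8, 12);
    chacha_quarter_round(x, 1, 5,  9, 13);
    chacha_quarter_round(x, 2, 6, 10, 14);
    chacha_quarter_round(x, 3, 7, 11, 15);
    chacha_quarter_round(x, 0, 5, 10, 15);
    chacha_quarter_round(x, 1, 6, 11, 12);
    chacha_quarter_round(x, 2, 7,  8, 13);
    chacha_quarter_round(x, 3, 4,  9, 14);
}

A ChaCha20 block applies ten such double rounds and then adds the original input state back in, which corresponds to the "accumulate key block" additions above before the key stream is XORed with the input.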
marvin-hansen/iggy-streaming-system
74,345
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__ELF__) #include <openssl/arm_arch.h> .section .rodata .align 7 .Lchacha20_consts: .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' .Linc: .long 1,2,3,4 .Lrol8: .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 .Lclamp: .quad 0x0FFFFFFC0FFFFFFF, 0x0FFFFFFC0FFFFFFC .text .type .Lpoly_hash_ad_internal,%function .align 6 .Lpoly_hash_ad_internal: .cfi_startproc cbnz x4, .Lpoly_hash_intro ret .Lpoly_hash_intro: cmp x4, #16 b.lt .Lpoly_hash_ad_tail ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #16 b .Lpoly_hash_ad_internal .Lpoly_hash_ad_tail: cbz x4, .Lpoly_hash_ad_ret eor v20.16b, v20.16b, v20.16b // Use T0 to load the AAD sub x4, x4, #1 .Lpoly_hash_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, x4] mov v20.b[0], w11 subs x4, x4, #1 b.ge .Lpoly_hash_tail_16_compose mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most .Lpoly_hash_ad_ret: ret .cfi_endproc .size .Lpoly_hash_ad_internal, .-.Lpoly_hash_ad_internal ///////////////////////////////// // // void chacha20_poly1305_seal(uint8_t *pt, uint8_t *ct, size_t len_in, uint8_t *ad, size_t len_ad, union open_data *seal_data); // .globl chacha20_poly1305_seal .hidden chacha20_poly1305_seal .type chacha20_poly1305_seal,%function .align 6 chacha20_poly1305_seal: AARCH64_SIGN_LINK_REGISTER .cfi_startproc stp x29, x30, [sp, #-80]! .cfi_def_cfa_offset 80 .cfi_offset w30, -72 .cfi_offset w29, -80 mov x29, sp // We probably could do .cfi_def_cfa w29, 80 at this point, but since // we don't actually use the frame pointer like that, it's probably not // worth bothering. 
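// Preserve the low 64 bits of v8-v15 (d8-d15); AAPCS64 treats those halves as callee-saved.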
stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] .cfi_offset b15, -8 .cfi_offset b14, -16 .cfi_offset b13, -24 .cfi_offset b12, -32 .cfi_offset b11, -40 .cfi_offset b10, -48 .cfi_offset b9, -56 .cfi_offset b8, -64 adrp x11, .Lchacha20_consts add x11, x11, :lo12:.Lchacha20_consts ld1 {v24.16b - v27.16b}, [x11] // .Load the CONSTS, INC, ROL8 and CLAMP values ld1 {v28.16b - v30.16b}, [x5] mov x15, #1 // Prepare the Poly1305 state mov x8, #0 mov x9, #0 mov x10, #0 ldr x12, [x5, #56] // The total cipher text length includes extra_in_len add x12, x12, x2 mov v31.d[0], x4 // Store the input and aad lengths mov v31.d[1], x12 cmp x2, #128 b.le .Lseal_128 // Optimization for smaller buffers // Initially we prepare 5 ChaCha20 blocks. Four to encrypt up to 4 blocks (256 bytes) of plaintext, // and one for the Poly1305 R and S keys. The first four blocks (A0-A3..D0-D3) are computed vertically, // the fifth block (A4-D4) horizontally. ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b sub x5, x5, #32 mov x6, #10 .align 5 .Lseal_init_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, 
v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x6, x6, #1 b.hi .Lseal_init_rounds add v15.4s, v15.4s, v25.4s mov x11, #4 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s and v4.16b, v4.16b, v27.16b add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s mov x16, v4.d[0] // Move the R key to GPRs mov x17, v4.d[1] mov v27.16b, v9.16b // Store the S key bl .Lpoly_hash_ad_internal mov x3, x0 cmp x2, #256 b.le .Lseal_tail ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 
{v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #256 mov x6, #4 // In the first run of the loop we need to hash 256 bytes, therefore we hash one block for the first 4 rounds mov x7, #6 // and two blocks for the remaining 6, for a total of (1 * 4 + 2 * 6) * 16 = 256 .Lseal_main_loop: adrp x11, .Lchacha20_consts add x11, x11, :lo12:.Lchacha20_consts ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s sub x5, x5, #32 .align 5 .Lseal_main_loop_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 
adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x6, x6, #1 b.ge .Lseal_main_loop_rounds ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most subs x7, x7, #1 b.gt .Lseal_main_loop_rounds eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s add v15.4s, v15.4s, v25.4s mov x11, #5 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 
v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s add v14.4s, v14.4s, v29.4s add v19.4s, v19.4s, v30.4s cmp x2, #320 b.le .Lseal_tail ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v4.16b eor v21.16b, v21.16b, v9.16b eor v22.16b, v22.16b, v14.16b eor v23.16b, v23.16b, v19.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #320 mov x6, #0 mov x7, #10 // For the remainder of the loop we always hash and encrypt 320 bytes per iteration b .Lseal_main_loop .Lseal_tail: // This part of the function handles the storage and authentication of the last [0,320) bytes // We assume A0-A4 ... D0-D4 hold at least inl (320 max) bytes of the stream data. 
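// Roadmap for the tail: whole 64-byte blocks are XORed and hashed first (shifting the
// remaining key-stream registers down after each one), then whole 16-byte blocks in
// .Lseal_tail_64, then any final [0,16) bytes are composed with extra_in into one padded
// Poly1305 block in .Lseal_tail_16, before .Lseal_hash_extra and the length block in .Lseal_finalize.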
cmp x2, #64 b.lt .Lseal_tail_64 // Store and authenticate 64B blocks per iteration ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v22.d[0] mov x12, v22.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v23.d[0] mov x12, v23.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 // Shift the state left by 64 bytes for the next iteration of the loop mov v0.16b, v1.16b 
mov v5.16b, v6.16b mov v10.16b, v11.16b mov v15.16b, v16.16b mov v1.16b, v2.16b mov v6.16b, v7.16b mov v11.16b, v12.16b mov v16.16b, v17.16b mov v2.16b, v3.16b mov v7.16b, v8.16b mov v12.16b, v13.16b mov v17.16b, v18.16b mov v3.16b, v4.16b mov v8.16b, v9.16b mov v13.16b, v14.16b mov v18.16b, v19.16b b .Lseal_tail .Lseal_tail_64: ldp x3, x4, [x5, #48] // extra_in_len and extra_in_ptr // Here we handle the last [0,64) bytes of plaintext cmp x2, #16 b.lt .Lseal_tail_16 // Each iteration encrypt and authenticate a 16B block ld1 {v20.16b}, [x1], #16 eor v20.16b, v20.16b, v0.16b mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most st1 {v20.16b}, [x0], #16 sub x2, x2, #16 // Shift the state left by 16 bytes for the next iteration of the loop mov v0.16b, v5.16b mov v5.16b, v10.16b mov v10.16b, v15.16b b .Lseal_tail_64 .Lseal_tail_16: // Here we handle the last [0,16) bytes of ciphertext that require a padded block cbz x2, .Lseal_hash_extra eor v20.16b, v20.16b, v20.16b // Use T0 to load the plaintext/extra in eor v21.16b, v21.16b, v21.16b // Use T1 to generate an AND mask that will only mask the ciphertext bytes not v22.16b, v20.16b mov x6, x2 add x1, x1, x2 cbz x4, .Lseal_tail_16_compose // No extra data to pad with, zero padding mov x7, #16 // We need to load some extra_in first for padding sub x7, x7, x2 cmp x4, x7 csel x7, x4, x7, lt // .Load the minimum of extra_in_len and the amount needed to fill the register mov x12, x7 add x3, x3, x7 sub x4, x4, x7 .Lseal_tail16_compose_extra_in: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, #-1]! mov v20.b[0], w11 subs x7, x7, #1 b.gt .Lseal_tail16_compose_extra_in add x3, x3, x12 .Lseal_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x1, #-1]! 
mov v20.b[0], w11 ext v21.16b, v22.16b, v21.16b, #15 subs x2, x2, #1 b.gt .Lseal_tail_16_compose and v0.16b, v0.16b, v21.16b eor v20.16b, v20.16b, v0.16b mov v21.16b, v20.16b .Lseal_tail_16_store: umov w11, v20.b[0] strb w11, [x0], #1 ext v20.16b, v20.16b, v20.16b, #1 subs x6, x6, #1 b.gt .Lseal_tail_16_store // Hash in the final ct block concatenated with extra_in mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most .Lseal_hash_extra: cbz x4, .Lseal_finalize .Lseal_hash_extra_loop: cmp x4, #16 b.lt .Lseal_hash_extra_tail ld1 {v20.16b}, [x3], #16 mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #16 b .Lseal_hash_extra_loop .Lseal_hash_extra_tail: cbz x4, .Lseal_finalize eor v20.16b, v20.16b, v20.16b // Use T0 to load the remaining extra ciphertext add x3, x3, x4 .Lseal_hash_extra_load: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, #-1]! 
mov v20.b[0], w11 subs x4, x4, #1 b.gt .Lseal_hash_extra_load // Hash in the final padded extra_in blcok mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most .Lseal_finalize: mov x11, v31.d[0] mov x12, v31.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most // Final reduction step sub x12, xzr, x15 orr x13, xzr, #3 subs x11, x8, #-5 sbcs x12, x9, x12 sbcs x13, x10, x13 csel x8, x11, x8, cs csel x9, x12, x9, cs csel x10, x13, x10, cs mov x11, v27.d[0] mov x12, v27.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 stp x8, x9, [x5] ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] .cfi_restore b15 .cfi_restore b14 .cfi_restore b13 .cfi_restore b12 .cfi_restore b11 .cfi_restore b10 .cfi_restore b9 .cfi_restore b8 ldp x29, x30, [sp], 80 .cfi_restore w29 .cfi_restore w30 .cfi_def_cfa_offset 0 AARCH64_VALIDATE_LINK_REGISTER ret .Lseal_128: // On some architectures preparing 5 blocks for small buffers is wasteful eor v25.16b, v25.16b, v25.16b mov x11, #1 mov v25.s[0], w11 mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v17.16b, v30.16b add v15.4s, v17.4s, v25.4s add v16.4s, v15.4s, v25.4s mov x6, #10 .Lseal_128_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add 
v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x6, x6, #1 b.hi .Lseal_128_rounds add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s // Only the first 32 bytes of the third block (counter = 0) are needed, // so skip updating v12 and v17. add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v30.4s, v30.4s, v25.4s add v15.4s, v15.4s, v30.4s add v30.4s, v30.4s, v25.4s add v16.4s, v16.4s, v30.4s and v2.16b, v2.16b, v27.16b mov x16, v2.d[0] // Move the R key to GPRs mov x17, v2.d[1] mov v27.16b, v7.16b // Store the S key bl .Lpoly_hash_ad_internal b .Lseal_tail .cfi_endproc .size chacha20_poly1305_seal,.-chacha20_poly1305_seal ///////////////////////////////// // // void chacha20_poly1305_open(uint8_t *pt, uint8_t *ct, size_t len_in, uint8_t *ad, size_t len_ad, union open_data *aead_data); // .globl chacha20_poly1305_open .hidden chacha20_poly1305_open .type chacha20_poly1305_open,%function .align 6 chacha20_poly1305_open: AARCH64_SIGN_LINK_REGISTER .cfi_startproc stp x29, x30, [sp, #-80]! .cfi_def_cfa_offset 80 .cfi_offset w30, -72 .cfi_offset w29, -80 mov x29, sp // We probably could do .cfi_def_cfa w29, 80 at this point, but since // we don't actually use the frame pointer like that, it's probably not // worth bothering. 
stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] .cfi_offset b15, -8 .cfi_offset b14, -16 .cfi_offset b13, -24 .cfi_offset b12, -32 .cfi_offset b11, -40 .cfi_offset b10, -48 .cfi_offset b9, -56 .cfi_offset b8, -64 adrp x11, .Lchacha20_consts add x11, x11, :lo12:.Lchacha20_consts ld1 {v24.16b - v27.16b}, [x11] // .Load the CONSTS, INC, ROL8 and CLAMP values ld1 {v28.16b - v30.16b}, [x5] mov x15, #1 // Prepare the Poly1305 state mov x8, #0 mov x9, #0 mov x10, #0 mov v31.d[0], x4 // Store the input and aad lengths mov v31.d[1], x2 cmp x2, #128 b.le .Lopen_128 // Optimization for smaller buffers // Initially we prepare a single ChaCha20 block for the Poly1305 R and S keys mov v0.16b, v24.16b mov v5.16b, v28.16b mov v10.16b, v29.16b mov v15.16b, v30.16b mov x6, #10 .align 5 .Lopen_init_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 subs x6, x6, #1 b.hi .Lopen_init_rounds add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s and v0.16b, v0.16b, v27.16b mov x16, v0.d[0] // Move the R key to GPRs mov x17, v0.d[1] mov v27.16b, v5.16b // Store the S key bl .Lpoly_hash_ad_internal .Lopen_ad_done: mov x3, x1 // Each iteration of the loop hash 320 bytes, and prepare stream for 320 bytes .Lopen_main_loop: cmp x2, #192 b.lt .Lopen_tail adrp x11, .Lchacha20_consts add x11, x11, :lo12:.Lchacha20_consts ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] sub x5, x5, #32 add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s lsr x4, x2, #4 // How many whole blocks we have to hash, will always be at least 12 sub x4, x4, #10 mov x7, #10 subs x6, x7, x4 subs x6, x7, x4 // itr1 can be negative if we have more than 320 bytes to hash csel x7, x7, x4, le // if itr1 is zero or less, itr2 should be 10 to indicate all 10 rounds are full cbz x7, .Lopen_main_loop_rounds_short .align 5 .Lopen_main_loop_rounds: ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 
adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most .Lopen_main_loop_rounds_short: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor 
v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x7, x7, #1 b.gt .Lopen_main_loop_rounds subs x6, x6, #1 b.ge .Lopen_main_loop_rounds_short eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s add v15.4s, v15.4s, v25.4s mov x11, #5 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s add v14.4s, v14.4s, v29.4s add v19.4s, v19.4s, v30.4s // We can always safely store 192 bytes ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - 
v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #192 mov v0.16b, v3.16b mov v5.16b, v8.16b mov v10.16b, v13.16b mov v15.16b, v18.16b cmp x2, #64 b.lt .Lopen_tail_64_store ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 mov v0.16b, v4.16b mov v5.16b, v9.16b mov v10.16b, v14.16b mov v15.16b, v19.16b cmp x2, #64 b.lt .Lopen_tail_64_store ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v4.16b eor v21.16b, v21.16b, v9.16b eor v22.16b, v22.16b, v14.16b eor v23.16b, v23.16b, v19.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 b .Lopen_main_loop .Lopen_tail: cbz x2, .Lopen_finalize lsr x4, x2, #4 // How many whole blocks we have to hash cmp x2, #64 b.le .Lopen_tail_64 cmp x2, #128 b.le .Lopen_tail_128 .Lopen_tail_192: // We need three more blocks mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v15.16b, v30.16b mov v16.16b, v30.16b mov v17.16b, v30.16b eor v23.16b, v23.16b, v23.16b eor v21.16b, v21.16b, v21.16b ins v23.s[0], v25.s[0] ins v21.d[0], x15 add v22.4s, v23.4s, v21.4s add v21.4s, v22.4s, v21.4s add v15.4s, v15.4s, v21.4s add v16.4s, v16.4s, v23.4s add v17.4s, v17.4s, v22.4s mov x7, #10 subs x6, x7, x4 // itr1 can be negative if we have more than 160 bytes to hash csel x7, x7, x4, le // if itr1 is zero or less, itr2 should be 10 to indicate all 10 rounds are hashing sub x4, x4, x7 cbz x7, .Lopen_tail_192_rounds_no_hash .Lopen_tail_192_rounds: ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most .Lopen_tail_192_rounds_no_hash: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b 
eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x7, x7, #1 b.gt .Lopen_tail_192_rounds subs x6, x6, #1 b.ge .Lopen_tail_192_rounds_no_hash // We hashed 160 bytes at most, may still have 32 bytes left .Lopen_tail_192_hash: cbz x4, .Lopen_tail_192_hash_done ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #1 b .Lopen_tail_192_hash .Lopen_tail_192_hash_done: add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v12.4s, v12.4s, v29.4s add v15.4s, v15.4s, v30.4s add v16.4s, v16.4s, v30.4s add v17.4s, v17.4s, v30.4s add v15.4s, v15.4s, v21.4s add v16.4s, v16.4s, v23.4s add v17.4s, v17.4s, v22.4s ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor 
v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #128 b .Lopen_tail_64_store .Lopen_tail_128: // We need two more blocks mov v0.16b, v24.16b mov v1.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v15.16b, v30.16b mov v16.16b, v30.16b eor v23.16b, v23.16b, v23.16b eor v22.16b, v22.16b, v22.16b ins v23.s[0], v25.s[0] ins v22.d[0], x15 add v22.4s, v22.4s, v23.4s add v15.4s, v15.4s, v22.4s add v16.4s, v16.4s, v23.4s mov x6, #10 sub x6, x6, x4 .Lopen_tail_128_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v1.4s, v1.4s, v6.4s eor v16.16b, v16.16b, v1.16b rev32 v16.8h, v16.8h add v11.4s, v11.4s, v16.4s eor v6.16b, v6.16b, v11.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 add v1.4s, v1.4s, v20.4s eor v16.16b, v16.16b, v1.16b tbl v16.16b, {v16.16b}, v26.16b add v11.4s, v11.4s, v16.4s eor v20.16b, v20.16b, v11.16b ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v6.16b, v6.16b, v6.16b, #4 ext v11.16b, v11.16b, v11.16b, #8 ext v16.16b, v16.16b, v16.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 add v1.4s, v1.4s, v6.4s eor v16.16b, v16.16b, v1.16b rev32 v16.8h, v16.8h add v11.4s, v11.4s, v16.4s eor v6.16b, v6.16b, v11.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 add v1.4s, v1.4s, v20.4s eor v16.16b, v16.16b, v1.16b tbl v16.16b, {v16.16b}, v26.16b add v11.4s, v11.4s, v16.4s eor v20.16b, v20.16b, v11.16b ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v6.16b, v6.16b, v6.16b, #12 ext v11.16b, v11.16b, v11.16b, #8 ext v16.16b, v16.16b, v16.16b, #4 subs x6, x6, #1 b.gt .Lopen_tail_128_rounds cbz x4, .Lopen_tail_128_rounds_done subs x4, x4, #1 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most b .Lopen_tail_128_rounds .Lopen_tail_128_rounds_done: add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v10.4s, v10.4s, v29.4s add 
v11.4s, v11.4s, v29.4s add v15.4s, v15.4s, v30.4s add v16.4s, v16.4s, v30.4s add v15.4s, v15.4s, v22.4s add v16.4s, v16.4s, v23.4s ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 b .Lopen_tail_64_store .Lopen_tail_64: // We just need a single block mov v0.16b, v24.16b mov v5.16b, v28.16b mov v10.16b, v29.16b mov v15.16b, v30.16b eor v23.16b, v23.16b, v23.16b ins v23.s[0], v25.s[0] add v15.4s, v15.4s, v23.4s mov x6, #10 sub x6, x6, x4 .Lopen_tail_64_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 subs x6, x6, #1 b.gt .Lopen_tail_64_rounds cbz x4, .Lopen_tail_64_rounds_done subs x4, x4, #1 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most b .Lopen_tail_64_rounds .Lopen_tail_64_rounds_done: add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v15.4s, v15.4s, v23.4s .Lopen_tail_64_store: cmp x2, #16 b.lt .Lopen_tail_16 ld1 {v20.16b}, [x1], #16 eor v20.16b, v20.16b, v0.16b st1 {v20.16b}, [x0], #16 mov v0.16b, v5.16b mov v5.16b, v10.16b mov v10.16b, v15.16b sub x2, x2, #16 b .Lopen_tail_64_store .Lopen_tail_16: // Here we handle the last [0,16) bytes that require a padded block cbz x2, .Lopen_finalize eor v20.16b, v20.16b, v20.16b // Use T0 to load the ciphertext eor v21.16b, v21.16b, v21.16b // Use T1 to generate an AND mask not v22.16b, v20.16b add x7, x1, x2 mov x6, x2 .Lopen_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x7, #-1]! 
mov v20.b[0], w11 ext v21.16b, v22.16b, v21.16b, #15 subs x2, x2, #1 b.gt .Lopen_tail_16_compose and v20.16b, v20.16b, v21.16b // Hash in the final padded block mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most eor v20.16b, v20.16b, v0.16b .Lopen_tail_16_store: umov w11, v20.b[0] strb w11, [x0], #1 ext v20.16b, v20.16b, v20.16b, #1 subs x6, x6, #1 b.gt .Lopen_tail_16_store .Lopen_finalize: mov x11, v31.d[0] mov x12, v31.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most // Final reduction step sub x12, xzr, x15 orr x13, xzr, #3 subs x11, x8, #-5 sbcs x12, x9, x12 sbcs x13, x10, x13 csel x8, x11, x8, cs csel x9, x12, x9, cs csel x10, x13, x10, cs mov x11, v27.d[0] mov x12, v27.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 stp x8, x9, [x5] ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] .cfi_restore b15 .cfi_restore b14 .cfi_restore b13 .cfi_restore b12 .cfi_restore b11 .cfi_restore b10 .cfi_restore b9 .cfi_restore b8 ldp x29, x30, [sp], 80 .cfi_restore w29 .cfi_restore w30 .cfi_def_cfa_offset 0 AARCH64_VALIDATE_LINK_REGISTER ret .Lopen_128: // On some architectures preparing 5 blocks for small buffers is wasteful eor v25.16b, v25.16b, v25.16b mov x11, #1 mov v25.s[0], w11 mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v17.16b, v30.16b add v15.4s, v17.4s, v25.4s add v16.4s, v15.4s, v25.4s mov x6, #10 .Lopen_128_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, 
v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x6, x6, #1 b.hi .Lopen_128_rounds add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v30.4s, v30.4s, v25.4s add v15.4s, v15.4s, v30.4s add v30.4s, v30.4s, v25.4s add v16.4s, v16.4s, v30.4s and v2.16b, v2.16b, v27.16b mov x16, v2.d[0] // Move the R key to GPRs mov x17, v2.d[1] mov v27.16b, v7.16b // Store the S key bl .Lpoly_hash_ad_internal .Lopen_128_store: cmp x2, #64 b.lt .Lopen_128_store_64 ld1 {v20.16b - v23.16b}, [x1], #64 mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this 
point acc2 has the value of 4 at most mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v22.d[0] mov x12, v22.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v23.d[0] mov x12, v23.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 mov v0.16b, v1.16b mov v5.16b, v6.16b mov v10.16b, v11.16b mov v15.16b, v16.16b .Lopen_128_store_64: lsr x4, x2, #4 mov x3, x1 .Lopen_128_hash_64: cbz x4, .Lopen_tail_64_store ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, 
x4, #1 b .Lopen_128_hash_64 .cfi_endproc .size chacha20_poly1305_open,.-chacha20_poly1305_open #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__ELF__)
marvin-hansen/iggy-streaming-system
26,588
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-ppc64le/crypto/test/trampoline-ppc.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(__powerpc64__) && defined(__ELF__) #include <openssl/boringssl_prefix_symbols_asm.h> .machine "any" .abiversion 2 .text .globl abi_test_trampoline .type abi_test_trampoline,@function .align 5 abi_test_trampoline: .localentry abi_test_trampoline,0 mflr 0 std 0, 16(1) stdu 1, -528(1) mfcr 0 std 0, 8(1) std 2, 24(1) std 4, 32(1) li 11, 48 stvx 20, 11, 1 li 11, 64 stvx 21, 11, 1 li 11, 80 stvx 22, 11, 1 li 11, 96 stvx 23, 11, 1 li 11, 112 stvx 24, 11, 1 li 11, 128 stvx 25, 11, 1 li 11, 144 stvx 26, 11, 1 li 11, 160 stvx 27, 11, 1 li 11, 176 stvx 28, 11, 1 li 11, 192 stvx 29, 11, 1 li 11, 208 stvx 30, 11, 1 li 11, 224 stvx 31, 11, 1 std 14, 240(1) std 15, 248(1) std 16, 256(1) std 17, 264(1) std 18, 272(1) std 19, 280(1) std 20, 288(1) std 21, 296(1) std 22, 304(1) std 23, 312(1) std 24, 320(1) std 25, 328(1) std 26, 336(1) std 27, 344(1) std 28, 352(1) std 29, 360(1) std 30, 368(1) std 31, 376(1) stfd 14, 384(1) stfd 15, 392(1) stfd 16, 400(1) stfd 17, 408(1) stfd 18, 416(1) stfd 19, 424(1) stfd 20, 432(1) stfd 21, 440(1) stfd 22, 448(1) stfd 23, 456(1) stfd 24, 464(1) stfd 25, 472(1) stfd 26, 480(1) stfd 27, 488(1) stfd 28, 496(1) stfd 29, 504(1) stfd 30, 512(1) stfd 31, 520(1) li 11, 0 lvx 20, 11, 4 li 11, 16 lvx 21, 11, 4 li 11, 32 lvx 22, 11, 4 li 11, 48 lvx 23, 11, 4 li 11, 64 lvx 24, 11, 4 li 11, 80 lvx 25, 11, 4 li 11, 96 lvx 26, 11, 4 li 11, 112 lvx 27, 11, 4 li 11, 128 lvx 28, 11, 4 li 11, 144 lvx 29, 11, 4 li 11, 160 lvx 30, 11, 4 li 11, 176 lvx 31, 11, 4 ld 14, 192(4) ld 15, 200(4) ld 16, 208(4) ld 17, 216(4) ld 18, 224(4) ld 19, 232(4) ld 20, 240(4) ld 21, 248(4) ld 22, 256(4) ld 23, 264(4) ld 24, 272(4) ld 25, 280(4) ld 26, 288(4) ld 27, 296(4) ld 28, 304(4) ld 29, 312(4) ld 30, 320(4) ld 31, 328(4) lfd 14, 336(4) lfd 15, 344(4) lfd 16, 352(4) lfd 17, 360(4) lfd 18, 368(4) lfd 19, 376(4) lfd 20, 384(4) lfd 21, 392(4) lfd 22, 400(4) lfd 23, 408(4) lfd 24, 416(4) lfd 25, 424(4) lfd 26, 432(4) lfd 27, 440(4) lfd 28, 448(4) lfd 29, 456(4) lfd 30, 464(4) lfd 31, 472(4) ld 0, 480(4) mtcr 0 addi 11, 5, -8 mr 12, 3 cmpdi 6, 0 beq .Largs_done mtctr 6 ldu 3, 8(11) bdz .Largs_done ldu 4, 8(11) bdz .Largs_done ldu 5, 8(11) bdz .Largs_done ldu 6, 8(11) bdz .Largs_done ldu 7, 8(11) bdz .Largs_done ldu 8, 8(11) bdz .Largs_done ldu 9, 8(11) bdz .Largs_done ldu 10, 8(11) .Largs_done: li 2, 0 mtctr 12 bctrl ld 2, 24(1) ld 4, 32(1) li 11, 0 stvx 20, 11, 4 li 11, 16 stvx 21, 11, 4 li 11, 32 stvx 22, 11, 4 li 11, 48 stvx 23, 11, 4 li 11, 64 stvx 24, 11, 4 li 11, 80 stvx 25, 11, 4 li 11, 96 stvx 26, 11, 4 li 11, 112 stvx 27, 11, 4 li 11, 128 stvx 28, 11, 4 li 11, 144 stvx 29, 11, 4 li 11, 160 stvx 30, 11, 4 li 11, 176 stvx 31, 11, 4 std 14, 192(4) std 15, 200(4) std 16, 208(4) std 17, 216(4) std 18, 224(4) std 19, 232(4) std 20, 240(4) std 21, 248(4) std 22, 256(4) std 23, 264(4) std 24, 272(4) std 25, 280(4) std 26, 288(4) std 27, 296(4) std 28, 304(4) std 29, 312(4) std 30, 320(4) std 31, 328(4) stfd 14, 336(4) stfd 15, 344(4) stfd 16, 352(4) stfd 17, 360(4) stfd 18, 368(4) stfd 19, 376(4) stfd 20, 384(4) stfd 21, 392(4) stfd 22, 400(4) stfd 23, 408(4) stfd 24, 416(4) stfd 25, 424(4) stfd 26, 432(4) stfd 27, 440(4) stfd 28, 448(4) stfd 29, 456(4) stfd 30, 464(4) stfd 31, 472(4) li 11, 48 lvx 20, 11, 1 li 11, 64 lvx 21, 11, 1 li 11, 80 lvx 22, 11, 1 li 11, 96 lvx 23, 11, 1 li 11, 112 lvx 24, 11, 1 li 11, 128 lvx 25, 11, 
1 li 11, 144 lvx 26, 11, 1 li 11, 160 lvx 27, 11, 1 li 11, 176 lvx 28, 11, 1 li 11, 192 lvx 29, 11, 1 li 11, 208 lvx 30, 11, 1 li 11, 224 lvx 31, 11, 1 ld 14, 240(1) ld 15, 248(1) ld 16, 256(1) ld 17, 264(1) ld 18, 272(1) ld 19, 280(1) ld 20, 288(1) ld 21, 296(1) ld 22, 304(1) ld 23, 312(1) ld 24, 320(1) ld 25, 328(1) ld 26, 336(1) ld 27, 344(1) ld 28, 352(1) ld 29, 360(1) ld 30, 368(1) ld 31, 376(1) lfd 14, 384(1) lfd 15, 392(1) lfd 16, 400(1) lfd 17, 408(1) lfd 18, 416(1) lfd 19, 424(1) lfd 20, 432(1) lfd 21, 440(1) lfd 22, 448(1) lfd 23, 456(1) lfd 24, 464(1) lfd 25, 472(1) lfd 26, 480(1) lfd 27, 488(1) lfd 28, 496(1) lfd 29, 504(1) lfd 30, 512(1) lfd 31, 520(1) mfcr 0 std 0, 480(4) ld 0, 8(1) mtcrf 0b00111000, 0 addi 1, 1, 528 ld 0, 16(1) mtlr 0 blr .size abi_test_trampoline,.-abi_test_trampoline .globl abi_test_clobber_r0 .type abi_test_clobber_r0,@function .align 5 abi_test_clobber_r0: .localentry abi_test_clobber_r0,0 li 0, 0 blr .size abi_test_clobber_r0,.-abi_test_clobber_r0 .globl abi_test_clobber_r2 .type abi_test_clobber_r2,@function .align 5 abi_test_clobber_r2: .localentry abi_test_clobber_r2,0 li 2, 0 blr .size abi_test_clobber_r2,.-abi_test_clobber_r2 .globl abi_test_clobber_r3 .type abi_test_clobber_r3,@function .align 5 abi_test_clobber_r3: .localentry abi_test_clobber_r3,0 li 3, 0 blr .size abi_test_clobber_r3,.-abi_test_clobber_r3 .globl abi_test_clobber_r4 .type abi_test_clobber_r4,@function .align 5 abi_test_clobber_r4: .localentry abi_test_clobber_r4,0 li 4, 0 blr .size abi_test_clobber_r4,.-abi_test_clobber_r4 .globl abi_test_clobber_r5 .type abi_test_clobber_r5,@function .align 5 abi_test_clobber_r5: .localentry abi_test_clobber_r5,0 li 5, 0 blr .size abi_test_clobber_r5,.-abi_test_clobber_r5 .globl abi_test_clobber_r6 .type abi_test_clobber_r6,@function .align 5 abi_test_clobber_r6: .localentry abi_test_clobber_r6,0 li 6, 0 blr .size abi_test_clobber_r6,.-abi_test_clobber_r6 .globl abi_test_clobber_r7 .type abi_test_clobber_r7,@function .align 5 abi_test_clobber_r7: .localentry abi_test_clobber_r7,0 li 7, 0 blr .size abi_test_clobber_r7,.-abi_test_clobber_r7 .globl abi_test_clobber_r8 .type abi_test_clobber_r8,@function .align 5 abi_test_clobber_r8: .localentry abi_test_clobber_r8,0 li 8, 0 blr .size abi_test_clobber_r8,.-abi_test_clobber_r8 .globl abi_test_clobber_r9 .type abi_test_clobber_r9,@function .align 5 abi_test_clobber_r9: .localentry abi_test_clobber_r9,0 li 9, 0 blr .size abi_test_clobber_r9,.-abi_test_clobber_r9 .globl abi_test_clobber_r10 .type abi_test_clobber_r10,@function .align 5 abi_test_clobber_r10: .localentry abi_test_clobber_r10,0 li 10, 0 blr .size abi_test_clobber_r10,.-abi_test_clobber_r10 .globl abi_test_clobber_r11 .type abi_test_clobber_r11,@function .align 5 abi_test_clobber_r11: .localentry abi_test_clobber_r11,0 li 11, 0 blr .size abi_test_clobber_r11,.-abi_test_clobber_r11 .globl abi_test_clobber_r12 .type abi_test_clobber_r12,@function .align 5 abi_test_clobber_r12: .localentry abi_test_clobber_r12,0 li 12, 0 blr .size abi_test_clobber_r12,.-abi_test_clobber_r12 .globl abi_test_clobber_r14 .type abi_test_clobber_r14,@function .align 5 abi_test_clobber_r14: .localentry abi_test_clobber_r14,0 li 14, 0 blr .size abi_test_clobber_r14,.-abi_test_clobber_r14 .globl abi_test_clobber_r15 .type abi_test_clobber_r15,@function .align 5 abi_test_clobber_r15: .localentry abi_test_clobber_r15,0 li 15, 0 blr .size abi_test_clobber_r15,.-abi_test_clobber_r15 .globl abi_test_clobber_r16 .type abi_test_clobber_r16,@function .align 5 
abi_test_clobber_r16: .localentry abi_test_clobber_r16,0 li 16, 0 blr .size abi_test_clobber_r16,.-abi_test_clobber_r16 .globl abi_test_clobber_r17 .type abi_test_clobber_r17,@function .align 5 abi_test_clobber_r17: .localentry abi_test_clobber_r17,0 li 17, 0 blr .size abi_test_clobber_r17,.-abi_test_clobber_r17 .globl abi_test_clobber_r18 .type abi_test_clobber_r18,@function .align 5 abi_test_clobber_r18: .localentry abi_test_clobber_r18,0 li 18, 0 blr .size abi_test_clobber_r18,.-abi_test_clobber_r18 .globl abi_test_clobber_r19 .type abi_test_clobber_r19,@function .align 5 abi_test_clobber_r19: .localentry abi_test_clobber_r19,0 li 19, 0 blr .size abi_test_clobber_r19,.-abi_test_clobber_r19 .globl abi_test_clobber_r20 .type abi_test_clobber_r20,@function .align 5 abi_test_clobber_r20: .localentry abi_test_clobber_r20,0 li 20, 0 blr .size abi_test_clobber_r20,.-abi_test_clobber_r20 .globl abi_test_clobber_r21 .type abi_test_clobber_r21,@function .align 5 abi_test_clobber_r21: .localentry abi_test_clobber_r21,0 li 21, 0 blr .size abi_test_clobber_r21,.-abi_test_clobber_r21 .globl abi_test_clobber_r22 .type abi_test_clobber_r22,@function .align 5 abi_test_clobber_r22: .localentry abi_test_clobber_r22,0 li 22, 0 blr .size abi_test_clobber_r22,.-abi_test_clobber_r22 .globl abi_test_clobber_r23 .type abi_test_clobber_r23,@function .align 5 abi_test_clobber_r23: .localentry abi_test_clobber_r23,0 li 23, 0 blr .size abi_test_clobber_r23,.-abi_test_clobber_r23 .globl abi_test_clobber_r24 .type abi_test_clobber_r24,@function .align 5 abi_test_clobber_r24: .localentry abi_test_clobber_r24,0 li 24, 0 blr .size abi_test_clobber_r24,.-abi_test_clobber_r24 .globl abi_test_clobber_r25 .type abi_test_clobber_r25,@function .align 5 abi_test_clobber_r25: .localentry abi_test_clobber_r25,0 li 25, 0 blr .size abi_test_clobber_r25,.-abi_test_clobber_r25 .globl abi_test_clobber_r26 .type abi_test_clobber_r26,@function .align 5 abi_test_clobber_r26: .localentry abi_test_clobber_r26,0 li 26, 0 blr .size abi_test_clobber_r26,.-abi_test_clobber_r26 .globl abi_test_clobber_r27 .type abi_test_clobber_r27,@function .align 5 abi_test_clobber_r27: .localentry abi_test_clobber_r27,0 li 27, 0 blr .size abi_test_clobber_r27,.-abi_test_clobber_r27 .globl abi_test_clobber_r28 .type abi_test_clobber_r28,@function .align 5 abi_test_clobber_r28: .localentry abi_test_clobber_r28,0 li 28, 0 blr .size abi_test_clobber_r28,.-abi_test_clobber_r28 .globl abi_test_clobber_r29 .type abi_test_clobber_r29,@function .align 5 abi_test_clobber_r29: .localentry abi_test_clobber_r29,0 li 29, 0 blr .size abi_test_clobber_r29,.-abi_test_clobber_r29 .globl abi_test_clobber_r30 .type abi_test_clobber_r30,@function .align 5 abi_test_clobber_r30: .localentry abi_test_clobber_r30,0 li 30, 0 blr .size abi_test_clobber_r30,.-abi_test_clobber_r30 .globl abi_test_clobber_r31 .type abi_test_clobber_r31,@function .align 5 abi_test_clobber_r31: .localentry abi_test_clobber_r31,0 li 31, 0 blr .size abi_test_clobber_r31,.-abi_test_clobber_r31 .globl abi_test_clobber_f0 .type abi_test_clobber_f0,@function .align 4 abi_test_clobber_f0: .localentry abi_test_clobber_f0,0 li 0, 0 std 0, -8(1) lfd 0, -8(1) blr .size abi_test_clobber_f0,.-abi_test_clobber_f0 .globl abi_test_clobber_f1 .type abi_test_clobber_f1,@function .align 4 abi_test_clobber_f1: .localentry abi_test_clobber_f1,0 li 0, 0 std 0, -8(1) lfd 1, -8(1) blr .size abi_test_clobber_f1,.-abi_test_clobber_f1 .globl abi_test_clobber_f2 .type abi_test_clobber_f2,@function .align 4 abi_test_clobber_f2: 
.localentry abi_test_clobber_f2,0 li 0, 0 std 0, -8(1) lfd 2, -8(1) blr .size abi_test_clobber_f2,.-abi_test_clobber_f2 .globl abi_test_clobber_f3 .type abi_test_clobber_f3,@function .align 4 abi_test_clobber_f3: .localentry abi_test_clobber_f3,0 li 0, 0 std 0, -8(1) lfd 3, -8(1) blr .size abi_test_clobber_f3,.-abi_test_clobber_f3 .globl abi_test_clobber_f4 .type abi_test_clobber_f4,@function .align 4 abi_test_clobber_f4: .localentry abi_test_clobber_f4,0 li 0, 0 std 0, -8(1) lfd 4, -8(1) blr .size abi_test_clobber_f4,.-abi_test_clobber_f4 .globl abi_test_clobber_f5 .type abi_test_clobber_f5,@function .align 4 abi_test_clobber_f5: .localentry abi_test_clobber_f5,0 li 0, 0 std 0, -8(1) lfd 5, -8(1) blr .size abi_test_clobber_f5,.-abi_test_clobber_f5 .globl abi_test_clobber_f6 .type abi_test_clobber_f6,@function .align 4 abi_test_clobber_f6: .localentry abi_test_clobber_f6,0 li 0, 0 std 0, -8(1) lfd 6, -8(1) blr .size abi_test_clobber_f6,.-abi_test_clobber_f6 .globl abi_test_clobber_f7 .type abi_test_clobber_f7,@function .align 4 abi_test_clobber_f7: .localentry abi_test_clobber_f7,0 li 0, 0 std 0, -8(1) lfd 7, -8(1) blr .size abi_test_clobber_f7,.-abi_test_clobber_f7 .globl abi_test_clobber_f8 .type abi_test_clobber_f8,@function .align 4 abi_test_clobber_f8: .localentry abi_test_clobber_f8,0 li 0, 0 std 0, -8(1) lfd 8, -8(1) blr .size abi_test_clobber_f8,.-abi_test_clobber_f8 .globl abi_test_clobber_f9 .type abi_test_clobber_f9,@function .align 4 abi_test_clobber_f9: .localentry abi_test_clobber_f9,0 li 0, 0 std 0, -8(1) lfd 9, -8(1) blr .size abi_test_clobber_f9,.-abi_test_clobber_f9 .globl abi_test_clobber_f10 .type abi_test_clobber_f10,@function .align 4 abi_test_clobber_f10: .localentry abi_test_clobber_f10,0 li 0, 0 std 0, -8(1) lfd 10, -8(1) blr .size abi_test_clobber_f10,.-abi_test_clobber_f10 .globl abi_test_clobber_f11 .type abi_test_clobber_f11,@function .align 4 abi_test_clobber_f11: .localentry abi_test_clobber_f11,0 li 0, 0 std 0, -8(1) lfd 11, -8(1) blr .size abi_test_clobber_f11,.-abi_test_clobber_f11 .globl abi_test_clobber_f12 .type abi_test_clobber_f12,@function .align 4 abi_test_clobber_f12: .localentry abi_test_clobber_f12,0 li 0, 0 std 0, -8(1) lfd 12, -8(1) blr .size abi_test_clobber_f12,.-abi_test_clobber_f12 .globl abi_test_clobber_f13 .type abi_test_clobber_f13,@function .align 4 abi_test_clobber_f13: .localentry abi_test_clobber_f13,0 li 0, 0 std 0, -8(1) lfd 13, -8(1) blr .size abi_test_clobber_f13,.-abi_test_clobber_f13 .globl abi_test_clobber_f14 .type abi_test_clobber_f14,@function .align 4 abi_test_clobber_f14: .localentry abi_test_clobber_f14,0 li 0, 0 std 0, -8(1) lfd 14, -8(1) blr .size abi_test_clobber_f14,.-abi_test_clobber_f14 .globl abi_test_clobber_f15 .type abi_test_clobber_f15,@function .align 4 abi_test_clobber_f15: .localentry abi_test_clobber_f15,0 li 0, 0 std 0, -8(1) lfd 15, -8(1) blr .size abi_test_clobber_f15,.-abi_test_clobber_f15 .globl abi_test_clobber_f16 .type abi_test_clobber_f16,@function .align 4 abi_test_clobber_f16: .localentry abi_test_clobber_f16,0 li 0, 0 std 0, -8(1) lfd 16, -8(1) blr .size abi_test_clobber_f16,.-abi_test_clobber_f16 .globl abi_test_clobber_f17 .type abi_test_clobber_f17,@function .align 4 abi_test_clobber_f17: .localentry abi_test_clobber_f17,0 li 0, 0 std 0, -8(1) lfd 17, -8(1) blr .size abi_test_clobber_f17,.-abi_test_clobber_f17 .globl abi_test_clobber_f18 .type abi_test_clobber_f18,@function .align 4 abi_test_clobber_f18: .localentry abi_test_clobber_f18,0 li 0, 0 std 0, -8(1) lfd 18, -8(1) blr .size 
abi_test_clobber_f18,.-abi_test_clobber_f18 .globl abi_test_clobber_f19 .type abi_test_clobber_f19,@function .align 4 abi_test_clobber_f19: .localentry abi_test_clobber_f19,0 li 0, 0 std 0, -8(1) lfd 19, -8(1) blr .size abi_test_clobber_f19,.-abi_test_clobber_f19 .globl abi_test_clobber_f20 .type abi_test_clobber_f20,@function .align 4 abi_test_clobber_f20: .localentry abi_test_clobber_f20,0 li 0, 0 std 0, -8(1) lfd 20, -8(1) blr .size abi_test_clobber_f20,.-abi_test_clobber_f20 .globl abi_test_clobber_f21 .type abi_test_clobber_f21,@function .align 4 abi_test_clobber_f21: .localentry abi_test_clobber_f21,0 li 0, 0 std 0, -8(1) lfd 21, -8(1) blr .size abi_test_clobber_f21,.-abi_test_clobber_f21 .globl abi_test_clobber_f22 .type abi_test_clobber_f22,@function .align 4 abi_test_clobber_f22: .localentry abi_test_clobber_f22,0 li 0, 0 std 0, -8(1) lfd 22, -8(1) blr .size abi_test_clobber_f22,.-abi_test_clobber_f22 .globl abi_test_clobber_f23 .type abi_test_clobber_f23,@function .align 4 abi_test_clobber_f23: .localentry abi_test_clobber_f23,0 li 0, 0 std 0, -8(1) lfd 23, -8(1) blr .size abi_test_clobber_f23,.-abi_test_clobber_f23 .globl abi_test_clobber_f24 .type abi_test_clobber_f24,@function .align 4 abi_test_clobber_f24: .localentry abi_test_clobber_f24,0 li 0, 0 std 0, -8(1) lfd 24, -8(1) blr .size abi_test_clobber_f24,.-abi_test_clobber_f24 .globl abi_test_clobber_f25 .type abi_test_clobber_f25,@function .align 4 abi_test_clobber_f25: .localentry abi_test_clobber_f25,0 li 0, 0 std 0, -8(1) lfd 25, -8(1) blr .size abi_test_clobber_f25,.-abi_test_clobber_f25 .globl abi_test_clobber_f26 .type abi_test_clobber_f26,@function .align 4 abi_test_clobber_f26: .localentry abi_test_clobber_f26,0 li 0, 0 std 0, -8(1) lfd 26, -8(1) blr .size abi_test_clobber_f26,.-abi_test_clobber_f26 .globl abi_test_clobber_f27 .type abi_test_clobber_f27,@function .align 4 abi_test_clobber_f27: .localentry abi_test_clobber_f27,0 li 0, 0 std 0, -8(1) lfd 27, -8(1) blr .size abi_test_clobber_f27,.-abi_test_clobber_f27 .globl abi_test_clobber_f28 .type abi_test_clobber_f28,@function .align 4 abi_test_clobber_f28: .localentry abi_test_clobber_f28,0 li 0, 0 std 0, -8(1) lfd 28, -8(1) blr .size abi_test_clobber_f28,.-abi_test_clobber_f28 .globl abi_test_clobber_f29 .type abi_test_clobber_f29,@function .align 4 abi_test_clobber_f29: .localentry abi_test_clobber_f29,0 li 0, 0 std 0, -8(1) lfd 29, -8(1) blr .size abi_test_clobber_f29,.-abi_test_clobber_f29 .globl abi_test_clobber_f30 .type abi_test_clobber_f30,@function .align 4 abi_test_clobber_f30: .localentry abi_test_clobber_f30,0 li 0, 0 std 0, -8(1) lfd 30, -8(1) blr .size abi_test_clobber_f30,.-abi_test_clobber_f30 .globl abi_test_clobber_f31 .type abi_test_clobber_f31,@function .align 4 abi_test_clobber_f31: .localentry abi_test_clobber_f31,0 li 0, 0 std 0, -8(1) lfd 31, -8(1) blr .size abi_test_clobber_f31,.-abi_test_clobber_f31 .globl abi_test_clobber_v0 .type abi_test_clobber_v0,@function .align 4 abi_test_clobber_v0: .localentry abi_test_clobber_v0,0 vxor 0, 0, 0 blr .size abi_test_clobber_v0,.-abi_test_clobber_v0 .globl abi_test_clobber_v1 .type abi_test_clobber_v1,@function .align 4 abi_test_clobber_v1: .localentry abi_test_clobber_v1,0 vxor 1, 1, 1 blr .size abi_test_clobber_v1,.-abi_test_clobber_v1 .globl abi_test_clobber_v2 .type abi_test_clobber_v2,@function .align 4 abi_test_clobber_v2: .localentry abi_test_clobber_v2,0 vxor 2, 2, 2 blr .size abi_test_clobber_v2,.-abi_test_clobber_v2 .globl abi_test_clobber_v3 .type abi_test_clobber_v3,@function .align 4 
abi_test_clobber_v3:
.localentry abi_test_clobber_v3,0
vxor 3, 3, 3
blr
.size abi_test_clobber_v3,.-abi_test_clobber_v3
.globl abi_test_clobber_v4
.type abi_test_clobber_v4,@function
.align 4
abi_test_clobber_v4:
.localentry abi_test_clobber_v4,0
vxor 4, 4, 4
blr
.size abi_test_clobber_v4,.-abi_test_clobber_v4
.globl abi_test_clobber_v5
.type abi_test_clobber_v5,@function
.align 4
abi_test_clobber_v5:
.localentry abi_test_clobber_v5,0
vxor 5, 5, 5
blr
.size abi_test_clobber_v5,.-abi_test_clobber_v5
.globl abi_test_clobber_v6
.type abi_test_clobber_v6,@function
.align 4
abi_test_clobber_v6:
.localentry abi_test_clobber_v6,0
vxor 6, 6, 6
blr
.size abi_test_clobber_v6,.-abi_test_clobber_v6
.globl abi_test_clobber_v7
.type abi_test_clobber_v7,@function
.align 4
abi_test_clobber_v7:
.localentry abi_test_clobber_v7,0
vxor 7, 7, 7
blr
.size abi_test_clobber_v7,.-abi_test_clobber_v7
.globl abi_test_clobber_v8
.type abi_test_clobber_v8,@function
.align 4
abi_test_clobber_v8:
.localentry abi_test_clobber_v8,0
vxor 8, 8, 8
blr
.size abi_test_clobber_v8,.-abi_test_clobber_v8
.globl abi_test_clobber_v9
.type abi_test_clobber_v9,@function
.align 4
abi_test_clobber_v9:
.localentry abi_test_clobber_v9,0
vxor 9, 9, 9
blr
.size abi_test_clobber_v9,.-abi_test_clobber_v9
.globl abi_test_clobber_v10
.type abi_test_clobber_v10,@function
.align 4
abi_test_clobber_v10:
.localentry abi_test_clobber_v10,0
vxor 10, 10, 10
blr
.size abi_test_clobber_v10,.-abi_test_clobber_v10
.globl abi_test_clobber_v11
.type abi_test_clobber_v11,@function
.align 4
abi_test_clobber_v11:
.localentry abi_test_clobber_v11,0
vxor 11, 11, 11
blr
.size abi_test_clobber_v11,.-abi_test_clobber_v11
.globl abi_test_clobber_v12
.type abi_test_clobber_v12,@function
.align 4
abi_test_clobber_v12:
.localentry abi_test_clobber_v12,0
vxor 12, 12, 12
blr
.size abi_test_clobber_v12,.-abi_test_clobber_v12
.globl abi_test_clobber_v13
.type abi_test_clobber_v13,@function
.align 4
abi_test_clobber_v13:
.localentry abi_test_clobber_v13,0
vxor 13, 13, 13
blr
.size abi_test_clobber_v13,.-abi_test_clobber_v13
.globl abi_test_clobber_v14
.type abi_test_clobber_v14,@function
.align 4
abi_test_clobber_v14:
.localentry abi_test_clobber_v14,0
vxor 14, 14, 14
blr
.size abi_test_clobber_v14,.-abi_test_clobber_v14
.globl abi_test_clobber_v15
.type abi_test_clobber_v15,@function
.align 4
abi_test_clobber_v15:
.localentry abi_test_clobber_v15,0
vxor 15, 15, 15
blr
.size abi_test_clobber_v15,.-abi_test_clobber_v15
.globl abi_test_clobber_v16
.type abi_test_clobber_v16,@function
.align 4
abi_test_clobber_v16:
.localentry abi_test_clobber_v16,0
vxor 16, 16, 16
blr
.size abi_test_clobber_v16,.-abi_test_clobber_v16
.globl abi_test_clobber_v17
.type abi_test_clobber_v17,@function
.align 4
abi_test_clobber_v17:
.localentry abi_test_clobber_v17,0
vxor 17, 17, 17
blr
.size abi_test_clobber_v17,.-abi_test_clobber_v17
.globl abi_test_clobber_v18
.type abi_test_clobber_v18,@function
.align 4
abi_test_clobber_v18:
.localentry abi_test_clobber_v18,0
vxor 18, 18, 18
blr
.size abi_test_clobber_v18,.-abi_test_clobber_v18
.globl abi_test_clobber_v19
.type abi_test_clobber_v19,@function
.align 4
abi_test_clobber_v19:
.localentry abi_test_clobber_v19,0
vxor 19, 19, 19
blr
.size abi_test_clobber_v19,.-abi_test_clobber_v19
.globl abi_test_clobber_v20
.type abi_test_clobber_v20,@function
.align 4
abi_test_clobber_v20:
.localentry abi_test_clobber_v20,0
vxor 20, 20, 20
blr
.size abi_test_clobber_v20,.-abi_test_clobber_v20
.globl abi_test_clobber_v21
.type abi_test_clobber_v21,@function
.align 4
abi_test_clobber_v21:
.localentry abi_test_clobber_v21,0
vxor 21, 21, 21
blr
.size abi_test_clobber_v21,.-abi_test_clobber_v21
.globl abi_test_clobber_v22
.type abi_test_clobber_v22,@function
.align 4
abi_test_clobber_v22:
.localentry abi_test_clobber_v22,0
vxor 22, 22, 22
blr
.size abi_test_clobber_v22,.-abi_test_clobber_v22
.globl abi_test_clobber_v23
.type abi_test_clobber_v23,@function
.align 4
abi_test_clobber_v23:
.localentry abi_test_clobber_v23,0
vxor 23, 23, 23
blr
.size abi_test_clobber_v23,.-abi_test_clobber_v23
.globl abi_test_clobber_v24
.type abi_test_clobber_v24,@function
.align 4
abi_test_clobber_v24:
.localentry abi_test_clobber_v24,0
vxor 24, 24, 24
blr
.size abi_test_clobber_v24,.-abi_test_clobber_v24
.globl abi_test_clobber_v25
.type abi_test_clobber_v25,@function
.align 4
abi_test_clobber_v25:
.localentry abi_test_clobber_v25,0
vxor 25, 25, 25
blr
.size abi_test_clobber_v25,.-abi_test_clobber_v25
.globl abi_test_clobber_v26
.type abi_test_clobber_v26,@function
.align 4
abi_test_clobber_v26:
.localentry abi_test_clobber_v26,0
vxor 26, 26, 26
blr
.size abi_test_clobber_v26,.-abi_test_clobber_v26
.globl abi_test_clobber_v27
.type abi_test_clobber_v27,@function
.align 4
abi_test_clobber_v27:
.localentry abi_test_clobber_v27,0
vxor 27, 27, 27
blr
.size abi_test_clobber_v27,.-abi_test_clobber_v27
.globl abi_test_clobber_v28
.type abi_test_clobber_v28,@function
.align 4
abi_test_clobber_v28:
.localentry abi_test_clobber_v28,0
vxor 28, 28, 28
blr
.size abi_test_clobber_v28,.-abi_test_clobber_v28
.globl abi_test_clobber_v29
.type abi_test_clobber_v29,@function
.align 4
abi_test_clobber_v29:
.localentry abi_test_clobber_v29,0
vxor 29, 29, 29
blr
.size abi_test_clobber_v29,.-abi_test_clobber_v29
.globl abi_test_clobber_v30
.type abi_test_clobber_v30,@function
.align 4
abi_test_clobber_v30:
.localentry abi_test_clobber_v30,0
vxor 30, 30, 30
blr
.size abi_test_clobber_v30,.-abi_test_clobber_v30
.globl abi_test_clobber_v31
.type abi_test_clobber_v31,@function
.align 4
abi_test_clobber_v31:
.localentry abi_test_clobber_v31,0
vxor 31, 31, 31
blr
.size abi_test_clobber_v31,.-abi_test_clobber_v31
.globl abi_test_clobber_cr0
.type abi_test_clobber_cr0,@function
.align 4
abi_test_clobber_cr0:
.localentry abi_test_clobber_cr0,0
mfcr 0
not 0, 0
mtcrf 128, 0
blr
.size abi_test_clobber_cr0,.-abi_test_clobber_cr0
.globl abi_test_clobber_cr1
.type abi_test_clobber_cr1,@function
.align 4
abi_test_clobber_cr1:
.localentry abi_test_clobber_cr1,0
mfcr 0
not 0, 0
mtcrf 64, 0
blr
.size abi_test_clobber_cr1,.-abi_test_clobber_cr1
.globl abi_test_clobber_cr2
.type abi_test_clobber_cr2,@function
.align 4
abi_test_clobber_cr2:
.localentry abi_test_clobber_cr2,0
mfcr 0
not 0, 0
mtcrf 32, 0
blr
.size abi_test_clobber_cr2,.-abi_test_clobber_cr2
.globl abi_test_clobber_cr3
.type abi_test_clobber_cr3,@function
.align 4
abi_test_clobber_cr3:
.localentry abi_test_clobber_cr3,0
mfcr 0
not 0, 0
mtcrf 16, 0
blr
.size abi_test_clobber_cr3,.-abi_test_clobber_cr3
.globl abi_test_clobber_cr4
.type abi_test_clobber_cr4,@function
.align 4
abi_test_clobber_cr4:
.localentry abi_test_clobber_cr4,0
mfcr 0
not 0, 0
mtcrf 8, 0
blr
.size abi_test_clobber_cr4,.-abi_test_clobber_cr4
.globl abi_test_clobber_cr5
.type abi_test_clobber_cr5,@function
.align 4
abi_test_clobber_cr5:
.localentry abi_test_clobber_cr5,0
mfcr 0
not 0, 0
mtcrf 4, 0
blr
.size abi_test_clobber_cr5,.-abi_test_clobber_cr5
.globl abi_test_clobber_cr6
.type abi_test_clobber_cr6,@function
.align 4
abi_test_clobber_cr6:
.localentry abi_test_clobber_cr6,0
mfcr 0
not 0, 0
mtcrf 2, 0
blr
.size abi_test_clobber_cr6,.-abi_test_clobber_cr6
.globl abi_test_clobber_cr7
.type abi_test_clobber_cr7,@function
.align 4
abi_test_clobber_cr7:
.localentry abi_test_clobber_cr7,0
mfcr 0
not 0, 0
mtcrf 1, 0
blr
.size abi_test_clobber_cr7,.-abi_test_clobber_cr7
.globl abi_test_clobber_ctr
.type abi_test_clobber_ctr,@function
.align 4
abi_test_clobber_ctr:
.localentry abi_test_clobber_ctr,0
li 0, 0
mtctr 0
blr
.size abi_test_clobber_ctr,.-abi_test_clobber_ctr
.globl abi_test_clobber_lr
.type abi_test_clobber_lr,@function
.align 4
abi_test_clobber_lr:
.localentry abi_test_clobber_lr,0
mflr 0
mtctr 0
li 0, 0
mtlr 0
bctr
.size abi_test_clobber_lr,.-abi_test_clobber_lr
#endif // !OPENSSL_NO_ASM && __powerpc64__ && __ELF__
#if defined(__ELF__)
// See https://www.airs.com/blog/archives/518.
.section .note.GNU-stack,"",%progbits
#endif
marvin-hansen/iggy-streaming-system
51,032
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-ppc64le/crypto/fipsmodule/aesp8-ppc.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(__powerpc64__) && defined(__ELF__) #include <openssl/boringssl_prefix_symbols_asm.h> .machine "any" .abiversion 2 .text .align 7 .Lrcon: .byte 0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01 .byte 0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b .byte 0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 addi 6,6,-0x48 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .byte 65,69,83,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .globl aes_hw_set_encrypt_key .type aes_hw_set_encrypt_key,@function .align 5 aes_hw_set_encrypt_key: .localentry aes_hw_set_encrypt_key,0 .Lset_encrypt_key: mflr 11 std 11,16(1) li 6,-1 cmpldi 3,0 beq- .Lenc_key_abort cmpldi 5,0 beq- .Lenc_key_abort li 6,-2 cmpwi 4,128 blt- .Lenc_key_abort cmpwi 4,256 bgt- .Lenc_key_abort andi. 0,4,0x3f bne- .Lenc_key_abort lis 0,0xfff0 li 12,-1 or 0,0,0 bl .Lconsts mtlr 11 neg 9,3 lvx 1,0,3 addi 3,3,15 lvsr 3,0,9 li 8,0x20 cmpwi 4,192 lvx 2,0,3 vspltisb 5,0x0f lvx 4,0,6 vxor 3,3,5 lvx 5,8,6 addi 6,6,0x10 vperm 1,1,2,3 li 7,8 vxor 0,0,0 mtctr 7 lvsl 8,0,5 vspltisb 9,-1 lvx 10,0,5 vperm 9,9,0,8 blt .Loop128 addi 3,3,8 beq .L192 addi 3,3,8 b .L256 .align 4 .Loop128: vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 bdnz .Loop128 lvx 4,0,6 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,0x50 li 8,10 b .Ldone .align 4 .L192: lvx 6,0,3 li 7,4 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 vspltisb 3,8 mtctr 7 vsububm 5,5,3 .Loop192: vperm 3,2,2,5 vsldoi 6,0,1,12 .long 0x10632509 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 7,0,2,8 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vsldoi 7,7,1,8 vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vsldoi 7,1,2,8 vxor 1,1,6 vsldoi 6,0,6,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 stvx 7,0,5 addi 5,5,16 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdnz .Loop192 li 8,12 addi 5,5,0x20 b .Ldone .align 4 .L256: lvx 6,0,3 li 7,7 li 8,14 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 mtctr 7 .Loop256: vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,2,2,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 
6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdz .Ldone vspltw 3,1,3 vsldoi 6,0,2,12 .long 0x106305C8 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vxor 2,2,3 b .Loop256 .align 4 .Ldone: lvx 2,0,3 vsel 2,10,2,9 stvx 2,0,3 li 6,0 or 12,12,12 stw 8,0(5) .Lenc_key_abort: mr 3,6 blr .long 0 .byte 0,12,0x14,1,0,0,3,0 .long 0 .size aes_hw_set_encrypt_key,.-aes_hw_set_encrypt_key .globl aes_hw_set_decrypt_key .type aes_hw_set_decrypt_key,@function .align 5 aes_hw_set_decrypt_key: .localentry aes_hw_set_decrypt_key,0 stdu 1,-64(1) mflr 10 std 10,80(1) bl .Lset_encrypt_key mtlr 10 cmpwi 3,0 bne- .Ldec_key_abort slwi 7,8,4 subi 3,5,240 srwi 8,8,1 add 5,3,7 mtctr 8 .Ldeckey: lwz 0, 0(3) lwz 6, 4(3) lwz 7, 8(3) lwz 8, 12(3) addi 3,3,16 lwz 9, 0(5) lwz 10,4(5) lwz 11,8(5) lwz 12,12(5) stw 0, 0(5) stw 6, 4(5) stw 7, 8(5) stw 8, 12(5) subi 5,5,16 stw 9, -16(3) stw 10,-12(3) stw 11,-8(3) stw 12,-4(3) bdnz .Ldeckey xor 3,3,3 .Ldec_key_abort: addi 1,1,64 blr .long 0 .byte 0,12,4,1,0x80,0,3,0 .long 0 .size aes_hw_set_decrypt_key,.-aes_hw_set_decrypt_key .globl aes_hw_encrypt .type aes_hw_encrypt,@function .align 5 aes_hw_encrypt: .localentry aes_hw_encrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 vspltisb 4,0x0f lvsr 3,0,11 vxor 2,2,4 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsr 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,2,1,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_enc: vperm 2,1,2,5 .long 0x10001508 lvx 2,7,5 addi 7,7,16 vperm 1,2,1,5 .long 0x10000D08 lvx 1,7,5 addi 7,7,16 bdnz .Loop_enc vperm 2,1,2,5 .long 0x10001508 lvx 2,7,5 vperm 1,2,1,5 .long 0x10000D09 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,2,1,3 vxor 3,3,4 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_hw_encrypt,.-aes_hw_encrypt .globl aes_hw_decrypt .type aes_hw_decrypt,@function .align 5 aes_hw_decrypt: .localentry aes_hw_decrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 vspltisb 4,0x0f lvsr 3,0,11 vxor 2,2,4 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsr 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,2,1,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_dec: vperm 2,1,2,5 .long 0x10001548 lvx 2,7,5 addi 7,7,16 vperm 1,2,1,5 .long 0x10000D48 lvx 1,7,5 addi 7,7,16 bdnz .Loop_dec vperm 2,1,2,5 .long 0x10001548 lvx 2,7,5 vperm 1,2,1,5 .long 0x10000D49 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,2,1,3 vxor 3,3,4 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_hw_decrypt,.-aes_hw_decrypt .globl aes_hw_cbc_encrypt .type aes_hw_cbc_encrypt,@function .align 5 aes_hw_cbc_encrypt: .localentry aes_hw_cbc_encrypt,0 cmpldi 5,16 .long 0x4dc00020 cmpwi 8,0 lis 0,0xffe0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 vspltisb 3,0x0f lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vxor 6,6,3 vperm 4,4,5,6 neg 11,3 lvsr 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 vxor 6,6,3 lvsl 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,9,0,8 vxor 8,8,3 srwi 9,9,1 li 10,16 subi 9,9,1 beq .Lcbc_dec .Lcbc_enc: vor 2,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 2,2,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,2,0 lvx 0,10,6 addi 10,10,16 vxor 2,2,4 .Loop_cbc_enc: vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420508 lvx 0,10,6 addi 
10,10,16 bdnz .Loop_cbc_enc vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 li 10,16 vperm 0,1,0,10 .long 0x10820509 cmpldi 5,16 vperm 3,4,4,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_enc b .Lcbc_done .align 4 .Lcbc_dec: cmpldi 5,128 bge _aesp8_cbc_decrypt8x vor 3,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 3,3,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,3,0 lvx 0,10,6 addi 10,10,16 .Loop_cbc_dec: vperm 1,0,1,10 .long 0x10420D48 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420548 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_dec vperm 1,0,1,10 .long 0x10420D48 lvx 1,10,6 li 10,16 vperm 0,1,0,10 .long 0x10420549 cmpldi 5,16 vxor 2,2,4 vor 4,3,3 vperm 3,2,2,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_dec .Lcbc_done: addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 neg 8,7 li 10,15 vxor 0,0,0 vspltisb 9,-1 vspltisb 3,0x0f lvsr 8,0,8 vperm 9,9,0,8 vxor 8,8,3 lvx 7,0,7 vperm 4,4,4,8 vsel 2,7,4,9 lvx 5,10,7 stvx 2,0,7 vsel 2,4,5,9 stvx 2,10,7 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_cbc_decrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 subi 5,5,128 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,10 addi 11,1,79 mtctr 9 .Load_cbc_dec_key: vperm 24,31,30,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,30,31,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_cbc_dec_key lvx 26,8,6 vperm 24,31,30,10 lvx 27,26,6 stvx 24,0,11 vperm 25,26,31,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,79 vperm 26,27,26,10 lvx 29,28,6 vperm 27,28,27,10 lvx 30,29,6 vperm 28,29,28,10 lvx 31,30,6 vperm 29,30,29,10 lvx 14,31,6 vperm 30,31,30,10 lvx 24,0,11 vperm 31,14,31,10 lvx 25,8,11 subi 3,3,15 li 10,8 .long 0x7C001E99 lvsl 6,0,10 vspltisb 3,0x0f .long 0x7C281E99 vxor 6,6,3 .long 0x7C5A1E99 vperm 0,0,0,6 .long 0x7C7B1E99 vperm 1,1,1,6 .long 0x7D5C1E99 vperm 2,2,2,6 vxor 14,0,23 .long 0x7D7D1E99 vperm 3,3,3,6 vxor 15,1,23 .long 0x7D9E1E99 vperm 10,10,10,6 vxor 16,2,23 .long 0x7DBF1E99 addi 3,3,0x80 vperm 11,11,11,6 vxor 17,3,23 vperm 12,12,12,6 vxor 18,10,23 vperm 13,13,13,6 vxor 19,11,23 vxor 20,12,23 vxor 21,13,23 mtctr 9 b .Loop_cbc_dec8x .align 5 .Loop_cbc_dec8x: .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x subic 5,5,128 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 subfe. 
0,0,0 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 and 0,0,5 .long 0x11CED548 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 add 3,3,0 .long 0x11CEDD48 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 addi 11,1,79 .long 0x11CEE548 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 lvx 24,0,11 .long 0x11CEED48 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 lvx 25,8,11 .long 0x11CEF548 vxor 4,4,31 .long 0x11EFF548 vxor 0,0,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 .long 0x11CE2549 .long 0x11EF0549 .long 0x7C001E99 .long 0x12100D49 .long 0x7C281E99 .long 0x12311549 vperm 0,0,0,6 .long 0x7C5A1E99 .long 0x12521D49 vperm 1,1,1,6 .long 0x7C7B1E99 .long 0x12735549 vperm 2,2,2,6 .long 0x7D5C1E99 .long 0x12945D49 vperm 3,3,3,6 .long 0x7D7D1E99 .long 0x12B56549 vperm 10,10,10,6 .long 0x7D9E1E99 vor 4,13,13 vperm 11,11,11,6 .long 0x7DBF1E99 addi 3,3,0x80 vperm 14,14,14,6 vperm 15,15,15,6 .long 0x7DC02799 vperm 12,12,12,6 vxor 14,0,23 vperm 16,16,16,6 .long 0x7DE82799 vperm 13,13,13,6 vxor 15,1,23 vperm 17,17,17,6 .long 0x7E1A2799 vxor 16,2,23 vperm 18,18,18,6 .long 0x7E3B2799 vxor 17,3,23 vperm 19,19,19,6 .long 0x7E5C2799 vxor 18,10,23 vperm 20,20,20,6 .long 0x7E7D2799 vxor 19,11,23 vperm 21,21,21,6 .long 0x7E9E2799 vxor 20,12,23 .long 0x7EBF2799 addi 4,4,0x80 vxor 21,13,23 mtctr 9 beq .Loop_cbc_dec8x addic. 
5,5,128 beq .Lcbc_dec8x_done nop nop .Loop_cbc_dec8x_tail: .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x_tail .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 .long 0x11EFF548 vxor 4,4,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 cmplwi 5,32 blt .Lcbc_dec8x_one nop beq .Lcbc_dec8x_two cmplwi 5,64 blt .Lcbc_dec8x_three nop beq .Lcbc_dec8x_four cmplwi 5,96 blt .Lcbc_dec8x_five nop beq .Lcbc_dec8x_six .Lcbc_dec8x_seven: .long 0x11EF2549 .long 0x12100D49 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_six: .long 0x12102549 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 16,16,16,6 vperm 17,17,17,6 .long 0x7E002799 vperm 18,18,18,6 .long 0x7E282799 vperm 19,19,19,6 .long 0x7E5A2799 vperm 20,20,20,6 .long 0x7E7B2799 vperm 21,21,21,6 .long 0x7E9C2799 .long 0x7EBD2799 addi 4,4,0x60 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_five: .long 0x12312549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 17,17,17,6 vperm 18,18,18,6 .long 0x7E202799 vperm 19,19,19,6 .long 0x7E482799 vperm 20,20,20,6 .long 0x7E7A2799 vperm 21,21,21,6 .long 0x7E9B2799 .long 0x7EBC2799 addi 4,4,0x50 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_four: .long 0x12522549 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 18,18,18,6 vperm 19,19,19,6 .long 0x7E402799 vperm 20,20,20,6 .long 0x7E682799 vperm 21,21,21,6 .long 0x7E9A2799 .long 0x7EBB2799 addi 4,4,0x40 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_three: .long 0x12732549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 19,19,19,6 vperm 20,20,20,6 .long 0x7E602799 vperm 21,21,21,6 .long 0x7E882799 .long 0x7EBA2799 addi 4,4,0x30 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_two: .long 0x12942549 .long 0x12B56549 vor 4,13,13 vperm 20,20,20,6 vperm 21,21,21,6 .long 0x7E802799 .long 0x7EA82799 addi 4,4,0x20 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_one: .long 0x12B52549 vor 4,13,13 vperm 21,21,21,6 .long 0x7EA02799 addi 4,4,0x10 .Lcbc_dec8x_done: vperm 4,4,4,6 .long 0x7C803F99 li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 
10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_hw_cbc_encrypt,.-aes_hw_cbc_encrypt .globl aes_hw_ctr32_encrypt_blocks .type aes_hw_ctr32_encrypt_blocks,@function .align 5 aes_hw_ctr32_encrypt_blocks: .localentry aes_hw_ctr32_encrypt_blocks,0 cmpldi 5,1 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 vspltisb 3,0x0f lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vspltisb 11,1 vxor 6,6,3 vperm 4,4,5,6 vsldoi 11,0,11,1 neg 11,3 lvsr 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 vxor 6,6,3 srwi 9,9,1 li 10,16 subi 9,9,1 cmpldi 5,8 bge _aesp8_ctr32_encrypt8x lvsl 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,9,0,8 vxor 8,8,3 lvx 0,0,6 mtctr 9 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 b .Loop_ctr32_enc .align 5 .Loop_ctr32_enc: vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_ctr32_enc vadduwm 4,4,11 vor 3,5,5 lvx 5,0,3 addi 3,3,16 subic. 5,5,1 vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 vperm 3,3,5,6 li 10,16 vperm 1,1,0,10 lvx 0,0,6 vxor 3,3,1 .long 0x10421D09 lvx 1,10,6 addi 10,10,16 vperm 2,2,2,8 vsel 3,7,2,9 mtctr 9 vperm 0,1,0,10 vor 7,2,2 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 stvx 3,0,4 addi 4,4,16 bne .Loop_ctr32_enc addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_ctr32_encrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,10 addi 11,1,79 mtctr 9 .Load_ctr32_enc_key: vperm 24,31,30,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,30,31,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_ctr32_enc_key lvx 26,8,6 vperm 24,31,30,10 lvx 27,26,6 stvx 24,0,11 vperm 25,26,31,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,79 vperm 26,27,26,10 lvx 29,28,6 vperm 27,28,27,10 lvx 30,29,6 vperm 28,29,28,10 lvx 31,30,6 vperm 29,30,29,10 lvx 15,31,6 vperm 30,31,30,10 lvx 24,0,11 vperm 31,15,31,10 lvx 25,8,11 vadduwm 7,11,11 subi 3,3,15 sldi 5,5,4 vadduwm 16,4,11 vadduwm 17,4,7 vxor 15,4,23 li 10,8 vadduwm 18,16,7 vxor 16,16,23 lvsl 6,0,10 vadduwm 19,17,7 vxor 17,17,23 vspltisb 3,0x0f vadduwm 20,18,7 vxor 18,18,23 vxor 6,6,3 vadduwm 21,19,7 vxor 19,19,23 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 b .Loop_ctr32_enc8x .align 5 .Loop_ctr32_enc8x: .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 .Loop_ctr32_enc8x_middle: lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD08 .long 0x1210CD08 .long 
0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 lvx 25,8,11 bdnz .Loop_ctr32_enc8x subic 11,5,256 .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 subfe 0,0,0 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 and 0,0,11 addi 11,1,79 .long 0x11EFD508 .long 0x1210D508 .long 0x1231D508 .long 0x1252D508 .long 0x1273D508 .long 0x1294D508 .long 0x12B5D508 .long 0x12D6D508 lvx 24,0,11 subic 5,5,129 .long 0x11EFDD08 addi 5,5,1 .long 0x1210DD08 .long 0x1231DD08 .long 0x1252DD08 .long 0x1273DD08 .long 0x1294DD08 .long 0x12B5DD08 .long 0x12D6DD08 lvx 25,8,11 .long 0x11EFE508 .long 0x7C001E99 .long 0x1210E508 .long 0x7C281E99 .long 0x1231E508 .long 0x7C5A1E99 .long 0x1252E508 .long 0x7C7B1E99 .long 0x1273E508 .long 0x7D5C1E99 .long 0x1294E508 .long 0x7D9D1E99 .long 0x12B5E508 .long 0x7DBE1E99 .long 0x12D6E508 .long 0x7DDF1E99 addi 3,3,0x80 .long 0x11EFED08 vperm 0,0,0,6 .long 0x1210ED08 vperm 1,1,1,6 .long 0x1231ED08 vperm 2,2,2,6 .long 0x1252ED08 vperm 3,3,3,6 .long 0x1273ED08 vperm 10,10,10,6 .long 0x1294ED08 vperm 12,12,12,6 .long 0x12B5ED08 vperm 13,13,13,6 .long 0x12D6ED08 vperm 14,14,14,6 add 3,3,0 subfe. 0,0,0 .long 0x11EFF508 vxor 0,0,31 .long 0x1210F508 vxor 1,1,31 .long 0x1231F508 vxor 2,2,31 .long 0x1252F508 vxor 3,3,31 .long 0x1273F508 vxor 10,10,31 .long 0x1294F508 vxor 12,12,31 .long 0x12B5F508 vxor 13,13,31 .long 0x12D6F508 vxor 14,14,31 bne .Lctr32_enc8x_break .long 0x100F0509 .long 0x10300D09 vadduwm 16,4,11 .long 0x10511509 vadduwm 17,4,7 vxor 15,4,23 .long 0x10721D09 vadduwm 18,16,7 vxor 16,16,23 .long 0x11535509 vadduwm 19,17,7 vxor 17,17,23 .long 0x11946509 vadduwm 20,18,7 vxor 18,18,23 .long 0x11B56D09 vadduwm 21,19,7 vxor 19,19,23 .long 0x11D67509 vadduwm 22,20,7 vxor 20,20,23 vperm 0,0,0,6 vadduwm 4,21,7 vxor 21,21,23 vperm 1,1,1,6 vxor 22,22,23 mtctr 9 .long 0x11EFC508 .long 0x7C002799 vperm 2,2,2,6 .long 0x1210C508 .long 0x7C282799 vperm 3,3,3,6 .long 0x1231C508 .long 0x7C5A2799 vperm 10,10,10,6 .long 0x1252C508 .long 0x7C7B2799 vperm 12,12,12,6 .long 0x1273C508 .long 0x7D5C2799 vperm 13,13,13,6 .long 0x1294C508 .long 0x7D9D2799 vperm 14,14,14,6 .long 0x12B5C508 .long 0x7DBE2799 .long 0x12D6C508 .long 0x7DDF2799 addi 4,4,0x80 b .Loop_ctr32_enc8x_middle .align 5 .Lctr32_enc8x_break: cmpwi 5,-0x60 blt .Lctr32_enc8x_one nop beq .Lctr32_enc8x_two cmpwi 5,-0x40 blt .Lctr32_enc8x_three nop beq .Lctr32_enc8x_four cmpwi 5,-0x20 blt .Lctr32_enc8x_five nop beq .Lctr32_enc8x_six cmpwi 5,0x00 blt .Lctr32_enc8x_seven .Lctr32_enc8x_eight: .long 0x11EF0509 .long 0x12100D09 .long 0x12311509 .long 0x12521D09 .long 0x12735509 .long 0x12946509 .long 0x12B56D09 .long 0x12D67509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 vperm 22,22,22,6 .long 0x7EBE2799 .long 0x7EDF2799 addi 4,4,0x80 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_seven: .long 0x11EF0D09 .long 0x12101509 .long 0x12311D09 .long 0x12525509 .long 0x12736509 .long 0x12946D09 .long 0x12B57509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b 
.Lctr32_enc8x_done .align 5 .Lctr32_enc8x_six: .long 0x11EF1509 .long 0x12101D09 .long 0x12315509 .long 0x12526509 .long 0x12736D09 .long 0x12947509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 .long 0x7E9D2799 addi 4,4,0x60 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_five: .long 0x11EF1D09 .long 0x12105509 .long 0x12316509 .long 0x12526D09 .long 0x12737509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 .long 0x7E7C2799 addi 4,4,0x50 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_four: .long 0x11EF5509 .long 0x12106509 .long 0x12316D09 .long 0x12527509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 .long 0x7E5B2799 addi 4,4,0x40 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_three: .long 0x11EF6509 .long 0x12106D09 .long 0x12317509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 .long 0x7E3A2799 addi 4,4,0x30 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_two: .long 0x11EF6D09 .long 0x12107509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 .long 0x7E082799 addi 4,4,0x20 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_one: .long 0x11EF7509 vperm 15,15,15,6 .long 0x7DE02799 addi 4,4,0x10 .Lctr32_enc8x_done: li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_hw_ctr32_encrypt_blocks,.-aes_hw_ctr32_encrypt_blocks .globl aes_hw_xts_encrypt .type aes_hw_xts_encrypt,@function .align 5 aes_hw_xts_encrypt: .localentry aes_hw_xts_encrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 li 11,0 or 0,0,0 vspltisb 9,0x07 lvsl 6,11,11 vspltisb 11,0x0f vxor 6,6,9 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vxor 5,5,11 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 vxor 5,5,11 cmpldi 7,0 beq .Lxts_enc_no_key2 lvsr 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_enc: vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_enc vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 vperm 0,1,0,7 .long 0x11080509 li 8,0 b .Lxts_enc .Lxts_enc_no_key2: li 3,-16 and 5,5,3 .Lxts_enc: lvx 4,0,10 addi 10,10,16 lvsr 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_encrypt6x andi. 
7,5,15 subic 0,5,32 subi 7,7,16 subfe 0,0,0 and 0,0,7 add 10,10,0 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 b .Loop_xts_enc .align 5 .Loop_xts_enc: vperm 1,0,1,7 .long 0x10420D08 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420508 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_enc vperm 1,0,1,7 .long 0x10420D08 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,8 .long 0x10620509 vperm 11,3,3,6 .long 0x7D602799 addi 4,4,16 subic. 5,5,16 beq .Lxts_enc_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 subic 0,5,32 subfe 0,0,0 and 0,0,7 add 10,10,0 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 3,3,0 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_enc vxor 3,3,8 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 subi 11,4,17 subi 4,4,16 mtctr 5 li 5,16 .Loop_xts_enc_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_enc_steal mtctr 9 b .Loop_xts_enc .Lxts_enc_done: cmpldi 8,0 beq .Lxts_enc_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 8,8,8,6 .long 0x7D004799 .Lxts_enc_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_hw_xts_encrypt,.-aes_hw_xts_encrypt .globl aes_hw_xts_decrypt .type aes_hw_xts_decrypt,@function .align 5 aes_hw_xts_decrypt: .localentry aes_hw_xts_decrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff8 li 12,-1 li 11,0 or 0,0,0 andi. 0,5,15 neg 0,0 andi. 0,0,16 sub 5,5,0 vspltisb 9,0x07 lvsl 6,11,11 vspltisb 11,0x0f vxor 6,6,9 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vxor 5,5,11 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 vxor 5,5,11 cmpldi 7,0 beq .Lxts_dec_no_key2 lvsr 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_dec: vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_dec vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 vperm 0,1,0,7 .long 0x11080509 li 8,0 b .Lxts_dec .Lxts_dec_no_key2: neg 3,5 andi. 3,3,15 add 5,5,3 .Lxts_dec: lvx 4,0,10 addi 10,10,16 lvsr 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_decrypt6x lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 blt .Ltail_xts_dec .align 5 .Loop_xts_dec: vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,8 .long 0x10620549 vperm 11,3,3,6 .long 0x7D602799 addi 4,4,16 subic. 
5,5,16 beq .Lxts_dec_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_dec .Ltail_xts_dec: vsrab 11,8,9 vaddubm 12,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 12,12,11 subi 10,10,16 add 10,10,5 vxor 2,2,8 vxor 2,2,12 .Loop_xts_dec_short: vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec_short vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,12 .long 0x10620549 vperm 11,3,3,6 .long 0x7D602799 vor 2,4,4 lvx 4,0,10 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 vxor 0,0,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 subi 11,4,1 mtctr 5 li 5,16 .Loop_xts_dec_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_dec_steal mtctr 9 b .Loop_xts_dec .Lxts_dec_done: cmpldi 8,0 beq .Lxts_dec_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 8,8,8,6 .long 0x7D004799 .Lxts_dec_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_hw_xts_decrypt,.-aes_hw_xts_decrypt .align 5 _aesp8_xts_encrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std 11,464(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,7 addi 7,1,79 mtctr 9 .Load_xts_enc_key: vperm 24,31,30,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,30,31,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_enc_key lvx 26,3,6 vperm 24,31,30,7 lvx 27,26,6 stvx 24,0,7 vperm 25,26,31,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,79 vperm 26,27,26,7 lvx 29,28,6 vperm 27,28,27,7 lvx 30,29,6 vperm 28,29,28,7 lvx 31,30,6 vperm 29,30,29,7 lvx 22,31,6 vperm 30,31,30,7 lvx 24,0,7 vperm 31,22,31,7 lvx 25,3,7 vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 vxor 8,8,11 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 vxor 8,8,11 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 vxor 8,8,11 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 vxor 8,8,11 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 vxor 8,8,11 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 vxor 8,8,11 vxor 31,31,23 mtctr 9 b .Loop_xts_enc6x .align 5 .Loop_xts_enc6x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 .long 0x1210C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 .long 0x1210CD08 lvx 25,3,7 bdnz .Loop_xts_enc6x subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 .long 0x118CC508 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 subfe. 0,0,0 vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 vxor 8,8,11 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD08 .long 0x1210CD08 and 0,0,5 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 vxor 8,8,11 .long 0x11EFD508 .long 0x1210D508 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 .long 0x11EFDD08 .long 0x1210DD08 addi 7,1,79 vxor 8,8,11 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED08 .long 0x118CED08 vxor 8,8,11 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED08 .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 vxor 8,8,11 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 vperm 0,0,0,6 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D09 vperm 1,1,1,6 .long 0x7C7B5699 .long 0x11EF2509 vperm 2,2,2,6 .long 0x7C9C5699 vxor 8,8,11 .long 0x11702D09 vperm 3,3,3,6 .long 0x7CBD5699 addi 10,10,0x60 vperm 4,4,4,6 vperm 5,5,5,6 vperm 7,7,7,6 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,17 vperm 13,13,13,6 .long 0x7D832799 vxor 12,1,18 vperm 14,14,14,6 .long 0x7DBA2799 vxor 13,2,19 vperm 15,15,15,6 .long 0x7DDB2799 vxor 14,3,20 vperm 16,11,11,6 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_enc6x addic. 
5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 blt .Lxts_enc6x_one nop beq .Lxts_enc6x_two cmpwi 5,0x40 blt .Lxts_enc6x_three nop beq .Lxts_enc6x_four .Lxts_enc6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,22,22 vperm 12,12,12,6 .long 0x7CE02799 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 vxor 11,15,22 vperm 15,15,15,6 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,21,21 vperm 12,12,12,6 .long 0x7CE02799 vperm 13,13,13,6 .long 0x7D832799 vxor 11,14,21 vperm 14,14,14,6 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,20,20 vperm 12,12,12,6 .long 0x7CE02799 vxor 11,13,20 vperm 13,13,13,6 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,19,19 vxor 11,12,19 vperm 12,12,12,6 .long 0x7CE02799 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_one: vxor 7,5,17 nop .Loop_xts_enc1x: .long 0x10E7C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 lvx 25,3,7 bdnz .Loop_xts_enc1x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 subi 10,10,16 .long 0x10E7CD08 lvsr 5,0,31 .long 0x10E7D508 .long 0x7C005699 .long 0x10E7DD08 addi 7,1,79 .long 0x10E7E508 lvx 24,0,7 .long 0x10E7ED08 lvx 25,3,7 vxor 17,17,31 vperm 0,0,0,6 .long 0x10E7F508 vperm 0,0,0,5 .long 0x10E78D09 vor 17,18,18 vxor 11,7,18 vperm 7,7,7,6 .long 0x7CE02799 addi 4,4,0x10 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_zero: cmpwi 31,0 beq .Lxts_enc6x_done add 10,10,31 subi 10,10,16 .long 0x7C005699 lvsr 5,0,31 vperm 0,0,0,6 vperm 0,0,0,5 vxor 11,11,17 .Lxts_enc6x_steal: vxor 0,0,17 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 subi 30,4,17 subi 4,4,16 mtctr 31 .Loop_xts_enc6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_enc6x_steal li 31,0 mtctr 9 b .Loop_xts_enc1x .align 4 .Lxts_enc6x_done: cmpldi 8,0 beq .Lxts_enc6x_ret vxor 8,17,23 vperm 8,8,8,6 .long 0x7D004799 .Lxts_enc6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_enc5x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 lvx 25,3,7 bdnz _aesp8_xts_enc5x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 subi 10,10,16 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 
0x11EFCD08 vxor 17,17,31 .long 0x10E7D508 lvsr 5,0,31 .long 0x118CD508 .long 0x11ADD508 .long 0x11CED508 .long 0x11EFD508 vxor 1,18,31 .long 0x10E7DD08 .long 0x7C005699 .long 0x118CDD08 .long 0x11ADDD08 .long 0x11CEDD08 .long 0x11EFDD08 vxor 2,19,31 addi 7,1,79 .long 0x10E7E508 .long 0x118CE508 .long 0x11ADE508 .long 0x11CEE508 .long 0x11EFE508 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED08 vperm 0,0,0,6 .long 0x118CED08 .long 0x11ADED08 .long 0x11CEED08 .long 0x11EFED08 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x118CF508 .long 0x11ADF508 .long 0x11CEF508 .long 0x11EFF508 .long 0x10E78D09 .long 0x118C0D09 .long 0x11AD1509 .long 0x11CE1D09 .long 0x11EF2509 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .align 5 _aesp8_xts_decrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std 11,464(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,7 addi 7,1,79 mtctr 9 .Load_xts_dec_key: vperm 24,31,30,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,30,31,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_dec_key lvx 26,3,6 vperm 24,31,30,7 lvx 27,26,6 stvx 24,0,7 vperm 25,26,31,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,79 vperm 26,27,26,7 lvx 29,28,6 vperm 27,28,27,7 lvx 30,29,6 vperm 28,29,28,7 lvx 31,30,6 vperm 29,30,29,7 lvx 22,31,6 vperm 30,31,30,7 lvx 24,0,7 vperm 31,22,31,7 lvx 25,3,7 vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 vxor 8,8,11 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 vxor 8,8,11 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 vxor 8,8,11 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 vxor 8,8,11 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 vxor 8,8,11 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 vxor 8,8,11 vxor 31,31,23 mtctr 9 b .Loop_xts_dec6x .align 5 .Loop_xts_dec6x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 lvx 25,3,7 bdnz .Loop_xts_dec6x subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 .long 0x118CC548 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 vxor 8,8,11 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD48 .long 0x1210CD48 and 0,0,5 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 vxor 8,8,11 .long 0x11EFD548 .long 0x1210D548 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 .long 0x11EFDD48 .long 0x1210DD48 addi 7,1,79 vxor 8,8,11 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED48 .long 0x118CED48 vxor 8,8,11 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED48 .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 vxor 8,8,11 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 vperm 0,0,0,6 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D49 vperm 1,1,1,6 .long 0x7C7B5699 .long 0x11EF2549 vperm 2,2,2,6 .long 0x7C9C5699 vxor 8,8,11 .long 0x12102D49 vperm 3,3,3,6 .long 0x7CBD5699 addi 10,10,0x60 vperm 4,4,4,6 vperm 5,5,5,6 vperm 7,7,7,6 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,17 vperm 13,13,13,6 .long 0x7D832799 vxor 12,1,18 vperm 14,14,14,6 .long 0x7DBA2799 vxor 13,2,19 vperm 15,15,15,6 .long 0x7DDB2799 vxor 14,3,20 vperm 16,16,16,6 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_dec6x addic. 5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 blt .Lxts_dec6x_one nop beq .Lxts_dec6x_two cmpwi 5,0x40 blt .Lxts_dec6x_three nop beq .Lxts_dec6x_four .Lxts_dec6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,22,22 vxor 18,8,23 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,18 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 vperm 15,15,15,6 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,21,21 vor 18,22,22 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,22 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,20,20 vor 18,21,21 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,21 vperm 13,13,13,6 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,19,19 vor 18,20,20 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,20 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_one: vxor 7,5,17 nop .Loop_xts_dec1x: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Loop_xts_dec1x subi 0,31,1 .long 0x10E7C548 andi. 
0,0,16 cmpwi 31,0 .long 0x10E7CD48 sub 10,10,0 .long 0x10E7D548 .long 0x7C005699 .long 0x10E7DD48 addi 7,1,79 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 17,17,31 vperm 0,0,0,6 .long 0x10E7F548 mtctr 9 .long 0x10E78D49 vor 17,18,18 vor 18,19,19 vperm 7,7,7,6 .long 0x7CE02799 addi 4,4,0x10 vxor 7,0,19 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_zero: cmpwi 31,0 beq .Lxts_dec6x_done .long 0x7C005699 vperm 0,0,0,6 vxor 7,0,18 .Lxts_dec6x_steal: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Lxts_dec6x_steal add 10,10,31 .long 0x10E7C548 cmpwi 31,0 .long 0x10E7CD48 .long 0x7C005699 .long 0x10E7D548 lvsr 5,0,31 .long 0x10E7DD48 addi 7,1,79 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 18,18,31 vperm 0,0,0,6 .long 0x10E7F548 vperm 0,0,0,5 .long 0x11679549 vperm 7,11,11,6 .long 0x7CE02799 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 vxor 7,7,17 subi 30,4,1 mtctr 31 .Loop_xts_dec6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_dec6x_steal li 31,0 mtctr 9 b .Loop_xts_dec1x .align 4 .Lxts_dec6x_done: cmpldi 8,0 beq .Lxts_dec6x_ret vxor 8,17,23 vperm 8,8,8,6 .long 0x7D004799 .Lxts_dec6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_dec5x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 lvx 25,3,7 bdnz _aesp8_xts_dec5x subi 0,31,1 .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 andi. 0,0,16 cmpwi 31,0 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 vxor 17,17,31 sub 10,10,0 .long 0x10E7D548 .long 0x118CD548 .long 0x11ADD548 .long 0x11CED548 .long 0x11EFD548 vxor 1,18,31 .long 0x10E7DD48 .long 0x7C005699 .long 0x118CDD48 .long 0x11ADDD48 .long 0x11CEDD48 .long 0x11EFDD48 vxor 2,19,31 addi 7,1,79 .long 0x10E7E548 .long 0x118CE548 .long 0x11ADE548 .long 0x11CEE548 .long 0x11EFE548 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED48 vperm 0,0,0,6 .long 0x118CED48 .long 0x11ADED48 .long 0x11CEED48 .long 0x11EFED48 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F548 .long 0x118CF548 .long 0x11ADF548 .long 0x11CEF548 .long 0x11EFF548 .long 0x10E78D49 .long 0x118C0D49 .long 0x11AD1549 .long 0x11CE1D49 .long 0x11EF2549 mtctr 9 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 #endif // !OPENSSL_NO_ASM && __powerpc64__ && __ELF__ #if defined(__ELF__) // See https://www.airs.com/blog/archives/518. .section .note.GNU-stack,"",%progbits #endif
marvin-hansen/iggy-streaming-system
7,965
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-ppc64le/crypto/fipsmodule/ghashp8-ppc.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(__powerpc64__) && defined(__ELF__) #include <openssl/boringssl_prefix_symbols_asm.h> .machine "any" .abiversion 2 .text .globl gcm_init_p8 .type gcm_init_p8,@function .align 5 gcm_init_p8: .localentry gcm_init_p8,0 li 0,-4096 li 8,0x10 li 12,-1 li 9,0x20 or 0,0,0 li 10,0x30 .long 0x7D202699 vspltisb 8,-16 vspltisb 5,1 vaddubm 8,8,8 vxor 4,4,4 vor 8,8,5 vsldoi 8,8,4,15 vsldoi 6,4,5,1 vaddubm 8,8,8 vspltisb 7,7 vor 8,8,6 vspltb 6,9,0 vsl 9,9,5 vsrab 6,6,7 vand 6,6,8 vxor 3,9,6 vsldoi 9,3,3,8 vsldoi 8,4,8,8 vsldoi 11,4,9,8 vsldoi 10,9,4,8 .long 0x7D001F99 .long 0x7D681F99 li 8,0x40 .long 0x7D291F99 li 9,0x50 .long 0x7D4A1F99 li 10,0x60 .long 0x10035CC8 .long 0x10234CC8 .long 0x104354C8 .long 0x10E044C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 vsldoi 6,0,0,8 .long 0x100044C8 vxor 6,6,2 vxor 16,0,6 vsldoi 17,16,16,8 vsldoi 19,4,17,8 vsldoi 18,17,4,8 .long 0x7E681F99 li 8,0x70 .long 0x7E291F99 li 9,0x80 .long 0x7E4A1F99 li 10,0x90 .long 0x10039CC8 .long 0x11B09CC8 .long 0x10238CC8 .long 0x11D08CC8 .long 0x104394C8 .long 0x11F094C8 .long 0x10E044C8 .long 0x114D44C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vsldoi 11,14,4,8 vsldoi 9,4,14,8 vxor 0,0,5 vxor 2,2,6 vxor 13,13,11 vxor 15,15,9 vsldoi 0,0,0,8 vsldoi 13,13,13,8 vxor 0,0,7 vxor 13,13,10 vsldoi 6,0,0,8 vsldoi 9,13,13,8 .long 0x100044C8 .long 0x11AD44C8 vxor 6,6,2 vxor 9,9,15 vxor 0,0,6 vxor 13,13,9 vsldoi 9,0,0,8 vsldoi 17,13,13,8 vsldoi 11,4,9,8 vsldoi 10,9,4,8 vsldoi 19,4,17,8 vsldoi 18,17,4,8 .long 0x7D681F99 li 8,0xa0 .long 0x7D291F99 li 9,0xb0 .long 0x7D4A1F99 li 10,0xc0 .long 0x7E681F99 .long 0x7E291F99 .long 0x7E4A1F99 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,2,0 .long 0 .size gcm_init_p8,.-gcm_init_p8 .globl gcm_gmult_p8 .type gcm_gmult_p8,@function .align 5 gcm_gmult_p8: .localentry gcm_gmult_p8,0 lis 0,0xfff8 li 8,0x10 li 12,-1 li 9,0x20 or 0,0,0 li 10,0x30 .long 0x7C601E99 .long 0x7D682699 lvsl 12,0,0 .long 0x7D292699 vspltisb 5,0x07 .long 0x7D4A2699 vxor 12,12,5 .long 0x7D002699 vperm 3,3,3,12 vxor 4,4,4 .long 0x10035CC8 .long 0x10234CC8 .long 0x104354C8 .long 0x10E044C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 vsldoi 6,0,0,8 .long 0x100044C8 vxor 6,6,2 vxor 0,0,6 vperm 0,0,0,12 .long 0x7C001F99 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,2,0 .long 0 .size gcm_gmult_p8,.-gcm_gmult_p8 .globl gcm_ghash_p8 .type gcm_ghash_p8,@function .align 5 gcm_ghash_p8: .localentry gcm_ghash_p8,0 li 0,-4096 li 8,0x10 li 12,-1 li 9,0x20 or 0,0,0 li 10,0x30 .long 0x7C001E99 .long 0x7D682699 li 8,0x40 lvsl 12,0,0 .long 0x7D292699 li 9,0x50 vspltisb 5,0x07 .long 0x7D4A2699 li 10,0x60 vxor 12,12,5 .long 0x7D002699 vperm 0,0,0,12 vxor 4,4,4 cmpldi 6,64 bge .Lgcm_ghash_p8_4x .long 0x7C602E99 addi 5,5,16 subic. 
6,6,16 vperm 3,3,3,12 vxor 3,3,0 beq .Lshort .long 0x7E682699 li 8,16 .long 0x7E292699 add 9,5,6 .long 0x7E4A2699 .align 5 .Loop_2x: .long 0x7E002E99 vperm 16,16,16,12 subic 6,6,32 .long 0x10039CC8 .long 0x11B05CC8 subfe 0,0,0 .long 0x10238CC8 .long 0x11D04CC8 and 0,0,6 .long 0x104394C8 .long 0x11F054C8 add 5,5,0 vxor 0,0,13 vxor 1,1,14 .long 0x10E044C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 2,2,15 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 .long 0x7C682E99 addi 5,5,32 vsldoi 6,0,0,8 .long 0x100044C8 vperm 3,3,3,12 vxor 6,6,2 vxor 3,3,6 vxor 3,3,0 cmpld 9,5 bgt .Loop_2x cmplwi 6,0 bne .Leven .Lshort: .long 0x10035CC8 .long 0x10234CC8 .long 0x104354C8 .long 0x10E044C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 vsldoi 6,0,0,8 .long 0x100044C8 vxor 6,6,2 .Leven: vxor 0,0,6 vperm 0,0,0,12 .long 0x7C001F99 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,4,0 .long 0 .align 5 .gcm_ghash_p8_4x: .Lgcm_ghash_p8_4x: stdu 1,-256(1) li 10,63 li 11,79 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 li 10,0x60 stvx 31,11,1 li 0,-1 stw 12,252(1) or 0,0,0 lvsl 5,0,8 li 8,0x70 .long 0x7E292699 li 9,0x80 vspltisb 6,8 li 10,0x90 .long 0x7EE82699 li 8,0xa0 .long 0x7F092699 li 9,0xb0 .long 0x7F2A2699 li 10,0xc0 .long 0x7FA82699 li 8,0x10 .long 0x7FC92699 li 9,0x20 .long 0x7FEA2699 li 10,0x30 vsldoi 7,4,6,8 vaddubm 18,5,7 vaddubm 19,6,18 srdi 6,6,4 .long 0x7C602E99 .long 0x7E082E99 subic. 6,6,8 .long 0x7EC92E99 .long 0x7F8A2E99 addi 5,5,0x40 vperm 3,3,3,12 vperm 16,16,16,12 vperm 22,22,22,12 vperm 28,28,28,12 vxor 2,3,0 .long 0x11B0BCC8 .long 0x11D0C4C8 .long 0x11F0CCC8 vperm 11,17,9,18 vperm 5,22,28,19 vperm 10,17,9,19 vperm 6,22,28,18 .long 0x12B68CC8 .long 0x12855CC8 .long 0x137C4CC8 .long 0x134654C8 vxor 21,21,14 vxor 20,20,13 vxor 27,27,21 vxor 26,26,15 blt .Ltail_4x .Loop_4x: .long 0x7C602E99 .long 0x7E082E99 subic. 6,6,4 .long 0x7EC92E99 .long 0x7F8A2E99 addi 5,5,0x40 vperm 16,16,16,12 vperm 22,22,22,12 vperm 28,28,28,12 vperm 3,3,3,12 .long 0x1002ECC8 .long 0x1022F4C8 .long 0x1042FCC8 .long 0x11B0BCC8 .long 0x11D0C4C8 .long 0x11F0CCC8 vxor 0,0,20 vxor 1,1,27 vxor 2,2,26 vperm 5,22,28,19 vperm 6,22,28,18 .long 0x10E044C8 .long 0x12855CC8 .long 0x134654C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 vsldoi 6,0,0,8 .long 0x12B68CC8 .long 0x137C4CC8 .long 0x100044C8 vxor 20,20,13 vxor 26,26,15 vxor 2,2,3 vxor 21,21,14 vxor 2,2,6 vxor 27,27,21 vxor 2,2,0 bge .Loop_4x .Ltail_4x: .long 0x1002ECC8 .long 0x1022F4C8 .long 0x1042FCC8 vxor 0,0,20 vxor 1,1,27 .long 0x10E044C8 vsldoi 5,1,4,8 vsldoi 6,4,1,8 vxor 2,2,26 vxor 0,0,5 vxor 2,2,6 vsldoi 0,0,0,8 vxor 0,0,7 vsldoi 6,0,0,8 .long 0x100044C8 vxor 6,6,2 vxor 0,0,6 addic. 
6,6,4 beq .Ldone_4x .long 0x7C602E99 cmpldi 6,2 li 6,-4 blt .Lone .long 0x7E082E99 beq .Ltwo .Lthree: .long 0x7EC92E99 vperm 3,3,3,12 vperm 16,16,16,12 vperm 22,22,22,12 vxor 2,3,0 vor 29,23,23 vor 30, 24, 24 vor 31,25,25 vperm 5,16,22,19 vperm 6,16,22,18 .long 0x12B08CC8 .long 0x13764CC8 .long 0x12855CC8 .long 0x134654C8 vxor 27,27,21 b .Ltail_4x .align 4 .Ltwo: vperm 3,3,3,12 vperm 16,16,16,12 vxor 2,3,0 vperm 5,4,16,19 vperm 6,4,16,18 vsldoi 29,4,17,8 vor 30, 17, 17 vsldoi 31,17,4,8 .long 0x12855CC8 .long 0x13704CC8 .long 0x134654C8 b .Ltail_4x .align 4 .Lone: vperm 3,3,3,12 vsldoi 29,4,9,8 vor 30, 9, 9 vsldoi 31,9,4,8 vxor 2,3,0 vxor 20,20,20 vxor 27,27,27 vxor 26,26,26 b .Ltail_4x .Ldone_4x: vperm 0,0,0,12 .long 0x7C001F99 li 10,63 li 11,79 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 addi 1,1,256 blr .long 0 .byte 0,12,0x04,0,0x80,0,4,0 .long 0 .size gcm_ghash_p8,.-gcm_ghash_p8 .byte 71,72,65,83,72,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && __powerpc64__ && __ELF__ #if defined(__ELF__) // See https://www.airs.com/blog/archives/518. .section .note.GNU-stack,"",%progbits #endif
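The gcm_init_p8, gcm_gmult_p8 and gcm_ghash_p8 routines above implement GHASH for AES-GCM on POWER8 using carry-less multiply instructions (vpmsumd), emitted as raw .long opcodes, with 2-block and 4-block aggregation paths; gcm_init_p8 precomputes the hash-key material that the other two routines consume. As a point of reference, a bit-serial C model of the GF(2^128) block multiply that these routines accelerate is sketched below. The function and type names are illustrative only, not part of the aws-lc API, and this loop is far slower than the vector code above.

// Illustrative bit-serial GHASH block step: Y = (Y ^ X) * H in GF(2^128).
// Blocks are 16 bytes, most-significant-bit-first, with the GCM reduction
// polynomial x^128 + x^7 + x^2 + x + 1 (constant 0xE1 in the top byte).
#include <stdint.h>
#include <string.h>

static void ghash_block(uint8_t Y[16], const uint8_t H[16], const uint8_t X[16]) {
    uint8_t Z[16] = {0};  // accumulator for the product
    uint8_t V[16];        // running multiple of H
    uint8_t T[16];

    for (int i = 0; i < 16; i++) T[i] = (uint8_t)(Y[i] ^ X[i]);  // fold block into Y first
    memcpy(V, H, 16);

    for (int i = 0; i < 128; i++) {
        // If bit i of T (MSB-first) is set, add (xor) the current multiple of H.
        if (T[i / 8] & (0x80 >> (i % 8))) {
            for (int j = 0; j < 16; j++) Z[j] ^= V[j];
        }
        // V = V * x, reducing by x^128 + x^7 + x^2 + x + 1.
        int lsb = V[15] & 1;
        for (int j = 15; j > 0; j--) V[j] = (uint8_t)((V[j] >> 1) | (V[j - 1] << 7));
        V[0] >>= 1;
        if (lsb) V[0] ^= 0xE1;
    }
    memcpy(Y, Z, 16);
}

gcm_ghash_p8 performs this operation once per 16-byte block of input, folding each block into the running hash; the assembly simply batches two or four such multiplies per loop iteration.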
marvin-hansen/iggy-streaming-system
13,678
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/test/trampoline-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .text // abi_test_trampoline loads callee-saved registers from |state|, calls |func| // with |argv|, then saves the callee-saved registers into |state|. It returns // the result of |func|. The |unwind| argument is unused. // uint64_t abi_test_trampoline(void (*func)(...), CallerState *state, // const uint64_t *argv, size_t argc, // uint64_t unwind); .globl _abi_test_trampoline .private_extern _abi_test_trampoline .align 4 _abi_test_trampoline: Labi_test_trampoline_begin: AARCH64_SIGN_LINK_REGISTER // Stack layout (low to high addresses) // x29,x30 (16 bytes) // d8-d15 (64 bytes) // x19-x28 (80 bytes) // x1 (8 bytes) // padding (8 bytes) stp x29, x30, [sp, #-176]! mov x29, sp // Saved callee-saved registers and |state|. stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] stp x19, x20, [sp, #80] stp x21, x22, [sp, #96] stp x23, x24, [sp, #112] stp x25, x26, [sp, #128] stp x27, x28, [sp, #144] str x1, [sp, #160] // Load registers from |state|, with the exception of x29. x29 is the // frame pointer and also callee-saved, but AAPCS64 allows platforms to // mandate that x29 always point to a frame. iOS64 does so, which means // we cannot fill x29 with entropy without violating ABI rules // ourselves. x29 is tested separately below. ldp d8, d9, [x1], #16 ldp d10, d11, [x1], #16 ldp d12, d13, [x1], #16 ldp d14, d15, [x1], #16 ldp x19, x20, [x1], #16 ldp x21, x22, [x1], #16 ldp x23, x24, [x1], #16 ldp x25, x26, [x1], #16 ldp x27, x28, [x1], #16 // Move parameters into temporary registers. mov x9, x0 mov x10, x2 mov x11, x3 // Load parameters into registers. cbz x11, Largs_done ldr x0, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x1, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x2, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x3, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x4, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x5, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x6, [x10], #8 subs x11, x11, #1 b.eq Largs_done ldr x7, [x10], #8 Largs_done: blr x9 // Reload |state| and store registers. ldr x1, [sp, #160] stp d8, d9, [x1], #16 stp d10, d11, [x1], #16 stp d12, d13, [x1], #16 stp d14, d15, [x1], #16 stp x19, x20, [x1], #16 stp x21, x22, [x1], #16 stp x23, x24, [x1], #16 stp x25, x26, [x1], #16 stp x27, x28, [x1], #16 // |func| is required to preserve x29, the frame pointer. We cannot load // random values into x29 (see comment above), so compare it against the // expected value and zero the field of |state| if corrupted. mov x9, sp cmp x29, x9 b.eq Lx29_ok str xzr, [x1] Lx29_ok: // Restore callee-saved registers. 
ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] ldp x19, x20, [sp, #80] ldp x21, x22, [sp, #96] ldp x23, x24, [sp, #112] ldp x25, x26, [sp, #128] ldp x27, x28, [sp, #144] ldp x29, x30, [sp], #176 AARCH64_VALIDATE_LINK_REGISTER ret .globl _abi_test_clobber_x0 .private_extern _abi_test_clobber_x0 .align 4 _abi_test_clobber_x0: AARCH64_VALID_CALL_TARGET mov x0, xzr ret .globl _abi_test_clobber_x1 .private_extern _abi_test_clobber_x1 .align 4 _abi_test_clobber_x1: AARCH64_VALID_CALL_TARGET mov x1, xzr ret .globl _abi_test_clobber_x2 .private_extern _abi_test_clobber_x2 .align 4 _abi_test_clobber_x2: AARCH64_VALID_CALL_TARGET mov x2, xzr ret .globl _abi_test_clobber_x3 .private_extern _abi_test_clobber_x3 .align 4 _abi_test_clobber_x3: AARCH64_VALID_CALL_TARGET mov x3, xzr ret .globl _abi_test_clobber_x4 .private_extern _abi_test_clobber_x4 .align 4 _abi_test_clobber_x4: AARCH64_VALID_CALL_TARGET mov x4, xzr ret .globl _abi_test_clobber_x5 .private_extern _abi_test_clobber_x5 .align 4 _abi_test_clobber_x5: AARCH64_VALID_CALL_TARGET mov x5, xzr ret .globl _abi_test_clobber_x6 .private_extern _abi_test_clobber_x6 .align 4 _abi_test_clobber_x6: AARCH64_VALID_CALL_TARGET mov x6, xzr ret .globl _abi_test_clobber_x7 .private_extern _abi_test_clobber_x7 .align 4 _abi_test_clobber_x7: AARCH64_VALID_CALL_TARGET mov x7, xzr ret .globl _abi_test_clobber_x8 .private_extern _abi_test_clobber_x8 .align 4 _abi_test_clobber_x8: AARCH64_VALID_CALL_TARGET mov x8, xzr ret .globl _abi_test_clobber_x9 .private_extern _abi_test_clobber_x9 .align 4 _abi_test_clobber_x9: AARCH64_VALID_CALL_TARGET mov x9, xzr ret .globl _abi_test_clobber_x10 .private_extern _abi_test_clobber_x10 .align 4 _abi_test_clobber_x10: AARCH64_VALID_CALL_TARGET mov x10, xzr ret .globl _abi_test_clobber_x11 .private_extern _abi_test_clobber_x11 .align 4 _abi_test_clobber_x11: AARCH64_VALID_CALL_TARGET mov x11, xzr ret .globl _abi_test_clobber_x12 .private_extern _abi_test_clobber_x12 .align 4 _abi_test_clobber_x12: AARCH64_VALID_CALL_TARGET mov x12, xzr ret .globl _abi_test_clobber_x13 .private_extern _abi_test_clobber_x13 .align 4 _abi_test_clobber_x13: AARCH64_VALID_CALL_TARGET mov x13, xzr ret .globl _abi_test_clobber_x14 .private_extern _abi_test_clobber_x14 .align 4 _abi_test_clobber_x14: AARCH64_VALID_CALL_TARGET mov x14, xzr ret .globl _abi_test_clobber_x15 .private_extern _abi_test_clobber_x15 .align 4 _abi_test_clobber_x15: AARCH64_VALID_CALL_TARGET mov x15, xzr ret .globl _abi_test_clobber_x16 .private_extern _abi_test_clobber_x16 .align 4 _abi_test_clobber_x16: AARCH64_VALID_CALL_TARGET mov x16, xzr ret .globl _abi_test_clobber_x17 .private_extern _abi_test_clobber_x17 .align 4 _abi_test_clobber_x17: AARCH64_VALID_CALL_TARGET mov x17, xzr ret .globl _abi_test_clobber_x19 .private_extern _abi_test_clobber_x19 .align 4 _abi_test_clobber_x19: AARCH64_VALID_CALL_TARGET mov x19, xzr ret .globl _abi_test_clobber_x20 .private_extern _abi_test_clobber_x20 .align 4 _abi_test_clobber_x20: AARCH64_VALID_CALL_TARGET mov x20, xzr ret .globl _abi_test_clobber_x21 .private_extern _abi_test_clobber_x21 .align 4 _abi_test_clobber_x21: AARCH64_VALID_CALL_TARGET mov x21, xzr ret .globl _abi_test_clobber_x22 .private_extern _abi_test_clobber_x22 .align 4 _abi_test_clobber_x22: AARCH64_VALID_CALL_TARGET mov x22, xzr ret .globl _abi_test_clobber_x23 .private_extern _abi_test_clobber_x23 .align 4 _abi_test_clobber_x23: AARCH64_VALID_CALL_TARGET mov x23, xzr ret .globl _abi_test_clobber_x24 .private_extern 
_abi_test_clobber_x24 .align 4 _abi_test_clobber_x24: AARCH64_VALID_CALL_TARGET mov x24, xzr ret .globl _abi_test_clobber_x25 .private_extern _abi_test_clobber_x25 .align 4 _abi_test_clobber_x25: AARCH64_VALID_CALL_TARGET mov x25, xzr ret .globl _abi_test_clobber_x26 .private_extern _abi_test_clobber_x26 .align 4 _abi_test_clobber_x26: AARCH64_VALID_CALL_TARGET mov x26, xzr ret .globl _abi_test_clobber_x27 .private_extern _abi_test_clobber_x27 .align 4 _abi_test_clobber_x27: AARCH64_VALID_CALL_TARGET mov x27, xzr ret .globl _abi_test_clobber_x28 .private_extern _abi_test_clobber_x28 .align 4 _abi_test_clobber_x28: AARCH64_VALID_CALL_TARGET mov x28, xzr ret .globl _abi_test_clobber_x29 .private_extern _abi_test_clobber_x29 .align 4 _abi_test_clobber_x29: AARCH64_VALID_CALL_TARGET mov x29, xzr ret .globl _abi_test_clobber_d0 .private_extern _abi_test_clobber_d0 .align 4 _abi_test_clobber_d0: AARCH64_VALID_CALL_TARGET fmov d0, xzr ret .globl _abi_test_clobber_d1 .private_extern _abi_test_clobber_d1 .align 4 _abi_test_clobber_d1: AARCH64_VALID_CALL_TARGET fmov d1, xzr ret .globl _abi_test_clobber_d2 .private_extern _abi_test_clobber_d2 .align 4 _abi_test_clobber_d2: AARCH64_VALID_CALL_TARGET fmov d2, xzr ret .globl _abi_test_clobber_d3 .private_extern _abi_test_clobber_d3 .align 4 _abi_test_clobber_d3: AARCH64_VALID_CALL_TARGET fmov d3, xzr ret .globl _abi_test_clobber_d4 .private_extern _abi_test_clobber_d4 .align 4 _abi_test_clobber_d4: AARCH64_VALID_CALL_TARGET fmov d4, xzr ret .globl _abi_test_clobber_d5 .private_extern _abi_test_clobber_d5 .align 4 _abi_test_clobber_d5: AARCH64_VALID_CALL_TARGET fmov d5, xzr ret .globl _abi_test_clobber_d6 .private_extern _abi_test_clobber_d6 .align 4 _abi_test_clobber_d6: AARCH64_VALID_CALL_TARGET fmov d6, xzr ret .globl _abi_test_clobber_d7 .private_extern _abi_test_clobber_d7 .align 4 _abi_test_clobber_d7: AARCH64_VALID_CALL_TARGET fmov d7, xzr ret .globl _abi_test_clobber_d8 .private_extern _abi_test_clobber_d8 .align 4 _abi_test_clobber_d8: AARCH64_VALID_CALL_TARGET fmov d8, xzr ret .globl _abi_test_clobber_d9 .private_extern _abi_test_clobber_d9 .align 4 _abi_test_clobber_d9: AARCH64_VALID_CALL_TARGET fmov d9, xzr ret .globl _abi_test_clobber_d10 .private_extern _abi_test_clobber_d10 .align 4 _abi_test_clobber_d10: AARCH64_VALID_CALL_TARGET fmov d10, xzr ret .globl _abi_test_clobber_d11 .private_extern _abi_test_clobber_d11 .align 4 _abi_test_clobber_d11: AARCH64_VALID_CALL_TARGET fmov d11, xzr ret .globl _abi_test_clobber_d12 .private_extern _abi_test_clobber_d12 .align 4 _abi_test_clobber_d12: AARCH64_VALID_CALL_TARGET fmov d12, xzr ret .globl _abi_test_clobber_d13 .private_extern _abi_test_clobber_d13 .align 4 _abi_test_clobber_d13: AARCH64_VALID_CALL_TARGET fmov d13, xzr ret .globl _abi_test_clobber_d14 .private_extern _abi_test_clobber_d14 .align 4 _abi_test_clobber_d14: AARCH64_VALID_CALL_TARGET fmov d14, xzr ret .globl _abi_test_clobber_d15 .private_extern _abi_test_clobber_d15 .align 4 _abi_test_clobber_d15: AARCH64_VALID_CALL_TARGET fmov d15, xzr ret .globl _abi_test_clobber_d16 .private_extern _abi_test_clobber_d16 .align 4 _abi_test_clobber_d16: AARCH64_VALID_CALL_TARGET fmov d16, xzr ret .globl _abi_test_clobber_d17 .private_extern _abi_test_clobber_d17 .align 4 _abi_test_clobber_d17: AARCH64_VALID_CALL_TARGET fmov d17, xzr ret .globl _abi_test_clobber_d18 .private_extern _abi_test_clobber_d18 .align 4 _abi_test_clobber_d18: AARCH64_VALID_CALL_TARGET fmov d18, xzr ret .globl _abi_test_clobber_d19 .private_extern _abi_test_clobber_d19 
.align 4 _abi_test_clobber_d19: AARCH64_VALID_CALL_TARGET fmov d19, xzr ret .globl _abi_test_clobber_d20 .private_extern _abi_test_clobber_d20 .align 4 _abi_test_clobber_d20: AARCH64_VALID_CALL_TARGET fmov d20, xzr ret .globl _abi_test_clobber_d21 .private_extern _abi_test_clobber_d21 .align 4 _abi_test_clobber_d21: AARCH64_VALID_CALL_TARGET fmov d21, xzr ret .globl _abi_test_clobber_d22 .private_extern _abi_test_clobber_d22 .align 4 _abi_test_clobber_d22: AARCH64_VALID_CALL_TARGET fmov d22, xzr ret .globl _abi_test_clobber_d23 .private_extern _abi_test_clobber_d23 .align 4 _abi_test_clobber_d23: AARCH64_VALID_CALL_TARGET fmov d23, xzr ret .globl _abi_test_clobber_d24 .private_extern _abi_test_clobber_d24 .align 4 _abi_test_clobber_d24: AARCH64_VALID_CALL_TARGET fmov d24, xzr ret .globl _abi_test_clobber_d25 .private_extern _abi_test_clobber_d25 .align 4 _abi_test_clobber_d25: AARCH64_VALID_CALL_TARGET fmov d25, xzr ret .globl _abi_test_clobber_d26 .private_extern _abi_test_clobber_d26 .align 4 _abi_test_clobber_d26: AARCH64_VALID_CALL_TARGET fmov d26, xzr ret .globl _abi_test_clobber_d27 .private_extern _abi_test_clobber_d27 .align 4 _abi_test_clobber_d27: AARCH64_VALID_CALL_TARGET fmov d27, xzr ret .globl _abi_test_clobber_d28 .private_extern _abi_test_clobber_d28 .align 4 _abi_test_clobber_d28: AARCH64_VALID_CALL_TARGET fmov d28, xzr ret .globl _abi_test_clobber_d29 .private_extern _abi_test_clobber_d29 .align 4 _abi_test_clobber_d29: AARCH64_VALID_CALL_TARGET fmov d29, xzr ret .globl _abi_test_clobber_d30 .private_extern _abi_test_clobber_d30 .align 4 _abi_test_clobber_d30: AARCH64_VALID_CALL_TARGET fmov d30, xzr ret .globl _abi_test_clobber_d31 .private_extern _abi_test_clobber_d31 .align 4 _abi_test_clobber_d31: AARCH64_VALID_CALL_TARGET fmov d31, xzr ret .globl _abi_test_clobber_v8_upper .private_extern _abi_test_clobber_v8_upper .align 4 _abi_test_clobber_v8_upper: AARCH64_VALID_CALL_TARGET fmov v8.d[1], xzr ret .globl _abi_test_clobber_v9_upper .private_extern _abi_test_clobber_v9_upper .align 4 _abi_test_clobber_v9_upper: AARCH64_VALID_CALL_TARGET fmov v9.d[1], xzr ret .globl _abi_test_clobber_v10_upper .private_extern _abi_test_clobber_v10_upper .align 4 _abi_test_clobber_v10_upper: AARCH64_VALID_CALL_TARGET fmov v10.d[1], xzr ret .globl _abi_test_clobber_v11_upper .private_extern _abi_test_clobber_v11_upper .align 4 _abi_test_clobber_v11_upper: AARCH64_VALID_CALL_TARGET fmov v11.d[1], xzr ret .globl _abi_test_clobber_v12_upper .private_extern _abi_test_clobber_v12_upper .align 4 _abi_test_clobber_v12_upper: AARCH64_VALID_CALL_TARGET fmov v12.d[1], xzr ret .globl _abi_test_clobber_v13_upper .private_extern _abi_test_clobber_v13_upper .align 4 _abi_test_clobber_v13_upper: AARCH64_VALID_CALL_TARGET fmov v13.d[1], xzr ret .globl _abi_test_clobber_v14_upper .private_extern _abi_test_clobber_v14_upper .align 4 _abi_test_clobber_v14_upper: AARCH64_VALID_CALL_TARGET fmov v14.d[1], xzr ret .globl _abi_test_clobber_v15_upper .private_extern _abi_test_clobber_v15_upper .align 4 _abi_test_clobber_v15_upper: AARCH64_VALID_CALL_TARGET fmov v15.d[1], xzr ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
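abi_test_trampoline and the abi_test_clobber_* helpers above form an ABI-testing harness: the trampoline seeds the AArch64 callee-saved registers (d8-d15, x19-x28) from |state|, forwards up to eight integer arguments from |argv|, calls |func|, and writes the registers back into |state| so the caller can detect clobbers, zeroing a separate field if x29 was corrupted. A minimal C sketch of driving it is below, assuming the prototype documented in the assembly comments; the caller_state_t layout mirrors the ldp/stp order in the trampoline, and all names here are illustrative rather than aws-lc's real abi_test types.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct {
    uint64_t d8_d15[8];    // callee-saved FP registers, loaded/stored first
    uint64_t x19_x28[10];  // callee-saved GPRs (x29 is handled separately)
    uint64_t x29;          // zeroed by the trampoline if |func| corrupted x29
} caller_state_t;          // illustrative layout, derived from the stp/ldp order above

extern uint64_t abi_test_trampoline(void (*func)(void), caller_state_t *state,
                                    const uint64_t *argv, size_t argc,
                                    uint64_t unwind);
extern void abi_test_clobber_x19(void);  // zeroes x19, defined above

// Returns 1 if |func| failed to preserve some callee-saved register.
static int clobbers_callee_saved(void (*func)(void)) {
    caller_state_t before, after;
    // Fill the state with a recognizable pattern; the trampoline copies it
    // into the callee-saved registers before calling |func|.
    for (int i = 0; i < 8; i++)  before.d8_d15[i]  = 0xd0d0d0d000000000u | (uint64_t)i;
    for (int i = 0; i < 10; i++) before.x19_x28[i] = 0xa5a5a5a500000000u | (uint64_t)i;
    before.x29 = 0x2929292929292929u;
    after = before;
    abi_test_trampoline(func, &after, NULL, 0, 0);
    return memcmp(&before, &after, sizeof(before)) != 0;
}

int main(void) {
    // abi_test_clobber_x19 deliberately violates the ABI, so this prints 1.
    printf("%d\n", clobbers_callee_saved(abi_test_clobber_x19));
    return 0;
}

A well-behaved function leaves |state| exactly as it was seeded, so the comparison reports no difference.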
marvin-hansen/iggy-streaming-system
47,072
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/aesv8-armx.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__>=7 .text .section __TEXT,__const .align 5 Lrcon: .long 0x01,0x01,0x01,0x01 .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d // rotate-n-splat .long 0x1b,0x1b,0x1b,0x1b .text .globl _aes_hw_set_encrypt_key .private_extern _aes_hw_set_encrypt_key .align 5 _aes_hw_set_encrypt_key: Lenc_key: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#3] // kFlag_aes_hw_set_encrypt_key #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x3,#-1 cmp x0,#0 b.eq Lenc_key_abort cmp x2,#0 b.eq Lenc_key_abort mov x3,#-2 cmp w1,#128 b.lt Lenc_key_abort cmp w1,#256 b.gt Lenc_key_abort tst w1,#0x3f b.ne Lenc_key_abort adrp x3,Lrcon@PAGE add x3,x3,Lrcon@PAGEOFF cmp w1,#192 eor v0.16b,v0.16b,v0.16b ld1 {v3.16b},[x0],#16 mov w1,#8 // reuse w1 ld1 {v1.4s,v2.4s},[x3],#32 b.lt Loop128 b.eq L192 b L256 .align 4 Loop128: tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b b.ne Loop128 ld1 {v1.4s},[x3] tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b tbl v6.16b,{v3.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v3.4s},[x2],#16 aese v6.16b,v0.16b eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b eor v3.16b,v3.16b,v6.16b st1 {v3.4s},[x2] add x2,x2,#0x50 mov w12,#10 b Ldone .align 4 L192: ld1 {v4.8b},[x0],#8 movi v6.16b,#8 // borrow v6.16b st1 {v3.4s},[x2],#16 sub v2.16b,v2.16b,v6.16b // adjust the mask Loop192: tbl v6.16b,{v4.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v4.8b},[x2],#8 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b dup v5.4s,v3.s[3] eor v5.16b,v5.16b,v4.16b eor v6.16b,v6.16b,v1.16b ext v4.16b,v0.16b,v4.16b,#12 shl v1.16b,v1.16b,#1 eor v4.16b,v4.16b,v5.16b eor v3.16b,v3.16b,v6.16b eor v4.16b,v4.16b,v6.16b st1 {v3.4s},[x2],#16 b.ne Loop192 mov w12,#12 add x2,x2,#0x20 b Ldone .align 4 L256: ld1 {v4.16b},[x0] mov w1,#7 mov w12,#14 st1 {v3.4s},[x2],#16 Loop256: tbl v6.16b,{v4.16b},v2.16b ext v5.16b,v0.16b,v3.16b,#12 st1 {v4.4s},[x2],#16 aese v6.16b,v0.16b subs w1,w1,#1 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v3.16b,v3.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v6.16b,v6.16b,v1.16b eor v3.16b,v3.16b,v5.16b shl v1.16b,v1.16b,#1 eor v3.16b,v3.16b,v6.16b st1 {v3.4s},[x2],#16 b.eq Ldone dup v6.4s,v3.s[3] // just splat ext v5.16b,v0.16b,v4.16b,#12 aese v6.16b,v0.16b eor v4.16b,v4.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v4.16b,v4.16b,v5.16b ext v5.16b,v0.16b,v5.16b,#12 eor v4.16b,v4.16b,v5.16b eor 
v4.16b,v4.16b,v6.16b b Loop256 Ldone: str w12,[x2] mov x3,#0 Lenc_key_abort: mov x0,x3 // return value ldr x29,[sp],#16 ret .globl _aes_hw_set_decrypt_key .private_extern _aes_hw_set_decrypt_key .align 5 _aes_hw_set_decrypt_key: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 bl Lenc_key cmp x0,#0 b.ne Ldec_key_abort sub x2,x2,#240 // restore original x2 mov x4,#-16 add x0,x2,x12,lsl#4 // end of key schedule ld1 {v0.4s},[x2] ld1 {v1.4s},[x0] st1 {v0.4s},[x0],x4 st1 {v1.4s},[x2],#16 Loop_imc: ld1 {v0.4s},[x2] ld1 {v1.4s},[x0] aesimc v0.16b,v0.16b aesimc v1.16b,v1.16b st1 {v0.4s},[x0],x4 st1 {v1.4s},[x2],#16 cmp x0,x2 b.hi Loop_imc ld1 {v0.4s},[x2] aesimc v0.16b,v0.16b st1 {v0.4s},[x0] eor x0,x0,x0 // return value Ldec_key_abort: ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .globl _aes_hw_encrypt .private_extern _aes_hw_encrypt .align 5 _aes_hw_encrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#1] // kFlag_aes_hw_encrypt #endif AARCH64_VALID_CALL_TARGET ldr w3,[x2,#240] ld1 {v0.4s},[x2],#16 ld1 {v2.16b},[x0] sub w3,w3,#2 ld1 {v1.4s},[x2],#16 Loop_enc: aese v2.16b,v0.16b aesmc v2.16b,v2.16b ld1 {v0.4s},[x2],#16 subs w3,w3,#2 aese v2.16b,v1.16b aesmc v2.16b,v2.16b ld1 {v1.4s},[x2],#16 b.gt Loop_enc aese v2.16b,v0.16b aesmc v2.16b,v2.16b ld1 {v0.4s},[x2] aese v2.16b,v1.16b eor v2.16b,v2.16b,v0.16b st1 {v2.16b},[x1] ret .globl _aes_hw_decrypt .private_extern _aes_hw_decrypt .align 5 _aes_hw_decrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#1] // kFlag_aes_hw_encrypt #endif AARCH64_VALID_CALL_TARGET ldr w3,[x2,#240] ld1 {v0.4s},[x2],#16 ld1 {v2.16b},[x0] sub w3,w3,#2 ld1 {v1.4s},[x2],#16 Loop_dec: aesd v2.16b,v0.16b aesimc v2.16b,v2.16b ld1 {v0.4s},[x2],#16 subs w3,w3,#2 aesd v2.16b,v1.16b aesimc v2.16b,v2.16b ld1 {v1.4s},[x2],#16 b.gt Loop_dec aesd v2.16b,v0.16b aesimc v2.16b,v2.16b ld1 {v0.4s},[x2] aesd v2.16b,v1.16b eor v2.16b,v2.16b,v0.16b st1 {v2.16b},[x1] ret .globl _aes_hw_cbc_encrypt .private_extern _aes_hw_cbc_encrypt .align 5 _aes_hw_cbc_encrypt: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 subs x2,x2,#16 mov x8,#16 b.lo Lcbc_abort csel x8,xzr,x8,eq cmp w5,#0 // en- or decrypting? ldr w5,[x3,#240] and x2,x2,#-16 ld1 {v6.16b},[x4] ld1 {v0.16b},[x0],x8 ld1 {v16.4s,v17.4s},[x3] // load key schedule... 
sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 b.eq Lcbc_dec cmp w5,#2 eor v0.16b,v0.16b,v6.16b eor v5.16b,v16.16b,v7.16b b.eq Lcbc_enc128 ld1 {v2.4s,v3.4s},[x7] add x7,x3,#16 add x6,x3,#16*4 add x12,x3,#16*5 aese v0.16b,v16.16b aesmc v0.16b,v0.16b add x14,x3,#16*6 add x3,x3,#16*7 b Lenter_cbc_enc .align 4 Loop_cbc_enc: aese v0.16b,v16.16b aesmc v0.16b,v0.16b st1 {v6.16b},[x1],#16 Lenter_cbc_enc: aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v0.16b,v2.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x6] cmp w5,#4 aese v0.16b,v3.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x12] b.eq Lcbc_enc192 aese v0.16b,v16.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x14] aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x3] nop Lcbc_enc192: aese v0.16b,v16.16b aesmc v0.16b,v0.16b subs x2,x2,#16 aese v0.16b,v17.16b aesmc v0.16b,v0.16b csel x8,xzr,x8,eq aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v16.16b},[x0],x8 aese v0.16b,v20.16b aesmc v0.16b,v0.16b eor v16.16b,v16.16b,v5.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x7] // re-pre-load rndkey[1] aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v0.16b,v23.16b eor v6.16b,v0.16b,v7.16b b.hs Loop_cbc_enc st1 {v6.16b},[x1],#16 b Lcbc_done .align 5 Lcbc_enc128: ld1 {v2.4s,v3.4s},[x7] aese v0.16b,v16.16b aesmc v0.16b,v0.16b b Lenter_cbc_enc128 Loop_cbc_enc128: aese v0.16b,v16.16b aesmc v0.16b,v0.16b st1 {v6.16b},[x1],#16 Lenter_cbc_enc128: aese v0.16b,v17.16b aesmc v0.16b,v0.16b subs x2,x2,#16 aese v0.16b,v2.16b aesmc v0.16b,v0.16b csel x8,xzr,x8,eq aese v0.16b,v3.16b aesmc v0.16b,v0.16b aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v16.16b},[x0],x8 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b eor v16.16b,v16.16b,v5.16b aese v0.16b,v23.16b eor v6.16b,v0.16b,v7.16b b.hs Loop_cbc_enc128 st1 {v6.16b},[x1],#16 b Lcbc_done .align 5 Lcbc_dec: ld1 {v18.16b},[x0],#16 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v19.16b,v18.16b,v18.16b b.lo Lcbc_dec_tail orr v1.16b,v18.16b,v18.16b ld1 {v18.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b orr v19.16b,v18.16b,v18.16b Loop3x_cbc_dec: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt Loop3x_cbc_dec aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 eor v5.16b,v2.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b eor v17.16b,v3.16b,v7.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v18.16b // are loaded with last "words" orr v6.16b,v19.16b,v19.16b mov x7,x3 aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v18.16b,v20.16b aesimc v18.16b,v18.16b ld1 {v2.16b},[x0],#16 aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v18.16b,v21.16b aesimc v18.16b,v18.16b 
ld1 {v3.16b},[x0],#16 aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v18.16b,v22.16b aesimc v18.16b,v18.16b ld1 {v19.16b},[x0],#16 aesd v0.16b,v23.16b aesd v1.16b,v23.16b aesd v18.16b,v23.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] add w6,w5,#2 eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v18.16b,v18.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 orr v0.16b,v2.16b,v2.16b st1 {v5.16b},[x1],#16 orr v1.16b,v3.16b,v3.16b st1 {v18.16b},[x1],#16 orr v18.16b,v19.16b,v19.16b b.hs Loop3x_cbc_dec cmn x2,#0x30 b.eq Lcbc_done nop Lcbc_dec_tail: aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt Lcbc_dec_tail aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v18.16b,v16.16b aesimc v18.16b,v18.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v18.16b,v17.16b aesimc v18.16b,v18.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v18.16b,v20.16b aesimc v18.16b,v18.16b cmn x2,#0x20 aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v18.16b,v21.16b aesimc v18.16b,v18.16b eor v5.16b,v6.16b,v7.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v18.16b,v22.16b aesimc v18.16b,v18.16b eor v17.16b,v3.16b,v7.16b aesd v1.16b,v23.16b aesd v18.16b,v23.16b b.eq Lcbc_dec_one eor v5.16b,v5.16b,v1.16b eor v17.16b,v17.16b,v18.16b orr v6.16b,v19.16b,v19.16b st1 {v5.16b},[x1],#16 st1 {v17.16b},[x1],#16 b Lcbc_done Lcbc_dec_one: eor v5.16b,v5.16b,v18.16b orr v6.16b,v19.16b,v19.16b st1 {v5.16b},[x1],#16 Lcbc_done: st1 {v6.16b},[x4] Lcbc_abort: ldr x29,[sp],#16 ret .globl _aes_hw_ctr32_encrypt_blocks .private_extern _aes_hw_ctr32_encrypt_blocks .align 5 _aes_hw_ctr32_encrypt_blocks: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9] // kFlag_aes_hw_ctr32_encrypt_blocks #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 ldr w5,[x3,#240] ldr w8, [x4, #12] ld1 {v0.4s},[x4] ld1 {v16.4s,v17.4s},[x3] // load key schedule... sub w5,w5,#4 mov x12,#16 cmp x2,#2 add x7,x3,x5,lsl#4 // pointer to last 5 round keys sub w5,w5,#2 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 // ARM Cortex-A57 and Cortex-A72 cores running in 32-bit mode are // affected by silicon errata #1742098 [0] and #1655431 [1], // respectively, where the second instruction of an aese/aesmc // instruction pair may execute twice if an interrupt is taken right // after the first instruction consumes an input register of which a // single 32-bit lane has been updated the last time it was modified. // // This function uses a counter in one 32-bit lane. The vmov lines // could write to v1.16b and v18.16b directly, but that trips this bugs. // We write to v6.16b and copy to the final register as a workaround. 
// // [0] ARM-EPM-049219 v23 Cortex-A57 MPCore Software Developers Errata Notice // [1] ARM-EPM-012079 v11.0 Cortex-A72 MPCore Software Developers Errata Notice #ifndef __AARCH64EB__ rev w8, w8 #endif add w10, w8, #1 orr v6.16b,v0.16b,v0.16b rev w10, w10 mov v6.s[3],w10 add w8, w8, #2 orr v1.16b,v6.16b,v6.16b b.ls Lctr32_tail rev w12, w8 mov v6.s[3],w12 sub x2,x2,#3 // bias orr v18.16b,v6.16b,v6.16b b Loop3x_ctr32 .align 4 Loop3x_ctr32: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v18.16b,v16.16b aesmc v18.16b,v18.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v18.16b,v17.16b aesmc v18.16b,v18.16b ld1 {v17.4s},[x7],#16 b.gt Loop3x_ctr32 aese v0.16b,v16.16b aesmc v4.16b,v0.16b aese v1.16b,v16.16b aesmc v5.16b,v1.16b ld1 {v2.16b},[x0],#16 add w9,w8,#1 aese v18.16b,v16.16b aesmc v18.16b,v18.16b ld1 {v3.16b},[x0],#16 rev w9,w9 aese v4.16b,v17.16b aesmc v4.16b,v4.16b aese v5.16b,v17.16b aesmc v5.16b,v5.16b ld1 {v19.16b},[x0],#16 mov x7,x3 aese v18.16b,v17.16b aesmc v17.16b,v18.16b aese v4.16b,v20.16b aesmc v4.16b,v4.16b aese v5.16b,v20.16b aesmc v5.16b,v5.16b eor v2.16b,v2.16b,v7.16b add w10,w8,#2 aese v17.16b,v20.16b aesmc v17.16b,v17.16b eor v3.16b,v3.16b,v7.16b add w8,w8,#3 aese v4.16b,v21.16b aesmc v4.16b,v4.16b aese v5.16b,v21.16b aesmc v5.16b,v5.16b // Note the logic to update v0.16b, v1.16b, and v1.16b is written to work // around a bug in ARM Cortex-A57 and Cortex-A72 cores running in // 32-bit mode. See the comment above. eor v19.16b,v19.16b,v7.16b mov v6.s[3], w9 aese v17.16b,v21.16b aesmc v17.16b,v17.16b orr v0.16b,v6.16b,v6.16b rev w10,w10 aese v4.16b,v22.16b aesmc v4.16b,v4.16b mov v6.s[3], w10 rev w12,w8 aese v5.16b,v22.16b aesmc v5.16b,v5.16b orr v1.16b,v6.16b,v6.16b mov v6.s[3], w12 aese v17.16b,v22.16b aesmc v17.16b,v17.16b orr v18.16b,v6.16b,v6.16b subs x2,x2,#3 aese v4.16b,v23.16b aese v5.16b,v23.16b aese v17.16b,v23.16b eor v2.16b,v2.16b,v4.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] st1 {v2.16b},[x1],#16 eor v3.16b,v3.16b,v5.16b mov w6,w5 st1 {v3.16b},[x1],#16 eor v19.16b,v19.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v19.16b},[x1],#16 b.hs Loop3x_ctr32 adds x2,x2,#3 b.eq Lctr32_done Lctr32_tail: cmp x2,#1 b.lt Lctr32_done // if len = 0, go to done mov x12,#16 csel x12,xzr,x12,eq aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b ld1 {v17.4s},[x7],#16 b.gt Lctr32_tail aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b ld1 {v2.16b},[x0],x12 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b ld1 {v3.16b},[x0] aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b eor v2.16b,v2.16b,v7.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b eor v3.16b,v3.16b,v7.16b aese v0.16b,v23.16b aese v1.16b,v23.16b eor v2.16b,v2.16b,v0.16b eor v3.16b,v3.16b,v1.16b st1 {v2.16b},[x1],#16 cbz x12,Lctr32_done // if step = 0 (len = 1), go to done st1 {v3.16b},[x1] Lctr32_done: ldr x29,[sp],#16 ret .globl _aes_hw_xts_encrypt .private_extern _aes_hw_xts_encrypt .align 5 _aes_hw_xts_encrypt: AARCH64_VALID_CALL_TARGET cmp x2,#16 // Original input data size bigger than 16, jump to big size processing. 
b.ne Lxts_enc_big_size // Encrypt the iv with key2, as the first XEX iv. ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 Loop_enc_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt Loop_enc_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b ld1 {v0.16b},[x0] eor v0.16b,v6.16b,v0.16b ldr w6,[x3,#240] ld1 {v28.4s,v29.4s},[x3],#32 // load key schedule... aese v0.16b,v28.16b aesmc v0.16b,v0.16b ld1 {v16.4s,v17.4s},[x3],#32 // load key schedule... aese v0.16b,v29.16b aesmc v0.16b,v0.16b subs w6,w6,#10 // if rounds==10, jump to aes-128-xts processing b.eq Lxts_128_enc Lxts_enc_round_loop: aese v0.16b,v16.16b aesmc v0.16b,v0.16b ld1 {v16.4s},[x3],#16 // load key schedule... aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v17.4s},[x3],#16 // load key schedule... subs w6,w6,#2 // bias b.gt Lxts_enc_round_loop Lxts_128_enc: ld1 {v18.4s,v19.4s},[x3],#32 // load key schedule... aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v0.16b,v17.16b aesmc v0.16b,v0.16b ld1 {v20.4s,v21.4s},[x3],#32 // load key schedule... aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v0.16b,v19.16b aesmc v0.16b,v0.16b ld1 {v22.4s,v23.4s},[x3],#32 // load key schedule... aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b ld1 {v7.4s},[x3] aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v0.16b,v23.16b eor v0.16b,v0.16b,v7.16b eor v0.16b,v0.16b,v6.16b st1 {v0.16b},[x1] b Lxts_enc_final_abort .align 4 Lxts_enc_big_size: // Encrypt input size > 16 bytes stp x19,x20,[sp,#-64]! stp x21,x22,[sp,#48] stp d8,d9,[sp,#32] stp d10,d11,[sp,#16] // tailcnt store the tail value of length%16. and x21,x2,#0xf and x2,x2,#-16 // len &= 0x1..110000, now divisible by 16 subs x2,x2,#16 mov x8,#16 b.lo Lxts_abort // if !(len > 16): error csel x8,xzr,x8,eq // if (len == 16): step = 0 // Firstly, encrypt the iv with key2, as the first iv of XEX. ldr w6,[x4,#240] ld1 {v0.4s},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.4s},[x4],#16 Loop_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt Loop_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b // The iv for second block // x9- iv(low), x10 - iv(high) // the five ivs stored into, v6.16b,v8.16b,v9.16b,v10.16b,v11.16b fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d8,x9 fmov v8.d[1],x10 ldr w5,[x3,#240] // next starting point ld1 {v0.16b},[x0],x8 ld1 {v16.4s,v17.4s},[x3] // load key schedule... 
sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] add x7,x3,#32 mov w6,w5 // Encryption Lxts_enc: ld1 {v24.16b},[x0],#16 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v28.16b,v0.16b,v0.16b orr v27.16b,v24.16b,v24.16b orr v29.16b,v24.16b,v24.16b b.lo Lxts_inner_enc_tail // when input size % 5 = 1 or 2 // (with tail or not) eor v0.16b,v0.16b,v6.16b // before encryption, xor with iv eor v24.16b,v24.16b,v8.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d9,x9 fmov v9.d[1],x10 orr v1.16b,v24.16b,v24.16b ld1 {v24.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b eor v27.16b,v24.16b,v9.16b // the third block eor v24.16b,v24.16b,v9.16b cmp x2,#32 b.lo Lxts_outer_enc_tail // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d10,x9 fmov v10.d[1],x10 ld1 {v25.16b},[x0],#16 // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v26.16b},[x0],#16 eor v25.16b,v25.16b,v10.16b // the fourth block eor v26.16b,v26.16b,v11.16b sub x2,x2,#32 // bias mov w6,w5 b Loop5x_xts_enc .align 4 Loop5x_xts_enc: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v25.16b,v16.16b aesmc v25.16b,v25.16b aese v26.16b,v16.16b aesmc v26.16b,v26.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v25.16b,v17.16b aesmc v25.16b,v25.16b aese v26.16b,v17.16b aesmc v26.16b,v26.16b ld1 {v17.4s},[x7],#16 b.gt Loop5x_xts_enc aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v25.16b,v16.16b aesmc v25.16b,v25.16b aese v26.16b,v16.16b aesmc v26.16b,v26.16b subs x2,x2,#0x50 // because Lxts_enc_tail4x aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v25.16b,v17.16b aesmc v25.16b,v25.16b aese v26.16b,v17.16b aesmc v26.16b,v26.16b csel x6,xzr,x2,gt // borrow x6, w6, "gt" is not typo mov x7,x3 aese v0.16b,v18.16b aesmc v0.16b,v0.16b aese v1.16b,v18.16b aesmc v1.16b,v1.16b aese v24.16b,v18.16b aesmc v24.16b,v24.16b aese v25.16b,v18.16b aesmc v25.16b,v25.16b aese v26.16b,v18.16b aesmc v26.16b,v26.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v26.16b // are loaded with last "words" add x6,x2,#0x60 // because Lxts_enc_tail4x aese v0.16b,v19.16b aesmc v0.16b,v0.16b aese v1.16b,v19.16b aesmc v1.16b,v1.16b aese v24.16b,v19.16b aesmc v24.16b,v24.16b aese v25.16b,v19.16b aesmc v25.16b,v25.16b aese v26.16b,v19.16b aesmc v26.16b,v26.16b aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b aese v25.16b,v20.16b aesmc v25.16b,v25.16b aese v26.16b,v20.16b aesmc v26.16b,v26.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b aese v25.16b,v21.16b aesmc v25.16b,v25.16b aese v26.16b,v21.16b aesmc v26.16b,v26.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc v24.16b,v24.16b 
aese v25.16b,v22.16b aesmc v25.16b,v25.16b aese v26.16b,v22.16b aesmc v26.16b,v26.16b eor v4.16b,v7.16b,v6.16b aese v0.16b,v23.16b // The iv for first block of one iteration extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v7.16b,v8.16b ld1 {v2.16b},[x0],#16 aese v1.16b,v23.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d8,x9 fmov v8.d[1],x10 eor v17.16b,v7.16b,v9.16b ld1 {v3.16b},[x0],#16 aese v24.16b,v23.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d9,x9 fmov v9.d[1],x10 eor v30.16b,v7.16b,v10.16b ld1 {v27.16b},[x0],#16 aese v25.16b,v23.16b // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d10,x9 fmov v10.d[1],x10 eor v31.16b,v7.16b,v11.16b ld1 {v28.16b},[x0],#16 aese v26.16b,v23.16b // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v29.16b},[x0],#16 cbz x6,Lxts_enc_tail4x ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v0.16b,v2.16b,v6.16b eor v5.16b,v5.16b,v1.16b eor v1.16b,v3.16b,v8.16b eor v17.16b,v17.16b,v24.16b eor v24.16b,v27.16b,v9.16b eor v30.16b,v30.16b,v25.16b eor v25.16b,v28.16b,v10.16b eor v31.16b,v31.16b,v26.16b st1 {v4.16b},[x1],#16 eor v26.16b,v29.16b,v11.16b st1 {v5.16b},[x1],#16 mov w6,w5 st1 {v17.16b},[x1],#16 ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v30.16b},[x1],#16 st1 {v31.16b},[x1],#16 b.hs Loop5x_xts_enc // If left 4 blocks, borrow the five block's processing. // This means if (x2 + 1 block) == 0, which is the case // when input size % 5 = 4, continue processing and do // another iteration in Loop5x_xts_enc which will exit from // cbz x6,.Lxts_enc_tail4x. 
// Otherwise, this is the end of the loop continue processing // 0, 1, 2 or 3 blocks (with or without tail) starting at // Loop5x_enc_after cmn x2,#0x10 b.ne Loop5x_enc_after orr v11.16b,v10.16b,v10.16b orr v10.16b,v9.16b,v9.16b orr v9.16b,v8.16b,v8.16b orr v8.16b,v6.16b,v6.16b fmov x9,d11 fmov x10,v11.d[1] eor v0.16b,v6.16b,v2.16b eor v1.16b,v8.16b,v3.16b eor v24.16b,v27.16b,v9.16b eor v25.16b,v28.16b,v10.16b eor v26.16b,v29.16b,v11.16b b.eq Loop5x_xts_enc Loop5x_enc_after: add x2,x2,#0x50 cbz x2,Lxts_enc_done // no blocks left add w6,w5,#2 subs x2,x2,#0x30 b.lo Lxts_inner_enc_tail // 1 or 2 blocks left // (with tail or not) eor v0.16b,v6.16b,v27.16b // 3 blocks left eor v1.16b,v8.16b,v28.16b eor v24.16b,v29.16b,v9.16b b Lxts_outer_enc_tail .align 4 Lxts_enc_tail4x: add x0,x0,#16 eor v5.16b,v1.16b,v5.16b st1 {v5.16b},[x1],#16 eor v17.16b,v24.16b,v17.16b st1 {v17.16b},[x1],#16 eor v30.16b,v25.16b,v30.16b eor v31.16b,v26.16b,v31.16b st1 {v30.16b,v31.16b},[x1],#32 b Lxts_enc_done .align 4 Lxts_outer_enc_tail: aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt Lxts_outer_enc_tail aese v0.16b,v16.16b aesmc v0.16b,v0.16b aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 // The iv for first block fmov x9,d9 fmov x10,v9.d[1] //mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr#31 eor x9,x11,x9,lsl#1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v8.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aese v0.16b,v17.16b aesmc v0.16b,v0.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b eor v17.16b,v9.16b,v7.16b add x6,x6,#0x20 add x0,x0,x6 mov x7,x3 aese v0.16b,v20.16b aesmc v0.16b,v0.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b aese v0.16b,v21.16b aesmc v0.16b,v0.16b aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b aese v0.16b,v22.16b aesmc v0.16b,v0.16b aese v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc v24.16b,v24.16b aese v0.16b,v23.16b aese v1.16b,v23.16b aese v24.16b,v23.16b ld1 {v27.16b},[x0],#16 add w6,w5,#2 ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v24.16b,v24.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 st1 {v5.16b},[x1],#16 st1 {v24.16b},[x1],#16 cmn x2,#0x30 b.eq Lxts_enc_done Lxts_encxor_one: orr v28.16b,v3.16b,v3.16b orr v29.16b,v27.16b,v27.16b nop Lxts_inner_enc_tail: cmn x2,#0x10 eor v1.16b,v28.16b,v6.16b eor v24.16b,v29.16b,v8.16b b.eq Lxts_enc_tail_loop eor v24.16b,v29.16b,v6.16b Lxts_enc_tail_loop: aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt Lxts_enc_tail_loop aese v1.16b,v16.16b aesmc v1.16b,v1.16b aese v24.16b,v16.16b aesmc v24.16b,v24.16b aese v1.16b,v17.16b aesmc v1.16b,v1.16b aese v24.16b,v17.16b aesmc v24.16b,v24.16b aese v1.16b,v20.16b aesmc v1.16b,v1.16b aese v24.16b,v20.16b aesmc v24.16b,v24.16b cmn x2,#0x20 aese v1.16b,v21.16b aesmc v1.16b,v1.16b aese v24.16b,v21.16b aesmc v24.16b,v24.16b eor v5.16b,v6.16b,v7.16b aese 
v1.16b,v22.16b aesmc v1.16b,v1.16b aese v24.16b,v22.16b aesmc v24.16b,v24.16b eor v17.16b,v8.16b,v7.16b aese v1.16b,v23.16b aese v24.16b,v23.16b b.eq Lxts_enc_one eor v5.16b,v5.16b,v1.16b st1 {v5.16b},[x1],#16 eor v17.16b,v17.16b,v24.16b orr v6.16b,v8.16b,v8.16b st1 {v17.16b},[x1],#16 fmov x9,d8 fmov x10,v8.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 b Lxts_enc_done Lxts_enc_one: eor v5.16b,v5.16b,v24.16b orr v6.16b,v6.16b,v6.16b st1 {v5.16b},[x1],#16 fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 b Lxts_enc_done .align 5 Lxts_enc_done: // Process the tail block with cipher stealing. tst x21,#0xf b.eq Lxts_abort mov x20,x0 mov x13,x1 sub x1,x1,#16 .composite_enc_loop: subs x21,x21,#1 ldrb w15,[x1,x21] ldrb w14,[x20,x21] strb w15,[x13,x21] strb w14,[x1,x21] b.gt .composite_enc_loop Lxts_enc_load_done: ld1 {v26.16b},[x1] eor v26.16b,v26.16b,v6.16b // Encrypt the composite block to get the last second encrypted text block ldr w6,[x3,#240] // load key schedule... ld1 {v0.16b},[x3],#16 sub w6,w6,#2 ld1 {v1.16b},[x3],#16 // load key schedule... Loop_final_enc: aese v26.16b,v0.16b aesmc v26.16b,v26.16b ld1 {v0.4s},[x3],#16 subs w6,w6,#2 aese v26.16b,v1.16b aesmc v26.16b,v26.16b ld1 {v1.4s},[x3],#16 b.gt Loop_final_enc aese v26.16b,v0.16b aesmc v26.16b,v26.16b ld1 {v0.4s},[x3] aese v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v6.16b st1 {v26.16b},[x1] Lxts_abort: ldp x21,x22,[sp,#48] ldp d8,d9,[sp,#32] ldp d10,d11,[sp,#16] ldp x19,x20,[sp],#64 Lxts_enc_final_abort: ret .globl _aes_hw_xts_decrypt .private_extern _aes_hw_xts_decrypt .align 5 _aes_hw_xts_decrypt: AARCH64_VALID_CALL_TARGET cmp x2,#16 // Original input data size bigger than 16, jump to big size processing. b.ne Lxts_dec_big_size // Encrypt the iv with key2, as the first XEX iv. ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 Loop_dec_small_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt Loop_dec_small_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b ld1 {v0.16b},[x0] eor v0.16b,v6.16b,v0.16b ldr w6,[x3,#240] ld1 {v28.4s,v29.4s},[x3],#32 // load key schedule... aesd v0.16b,v28.16b aesimc v0.16b,v0.16b ld1 {v16.4s,v17.4s},[x3],#32 // load key schedule... aesd v0.16b,v29.16b aesimc v0.16b,v0.16b subs w6,w6,#10 // bias b.eq Lxts_128_dec Lxts_dec_round_loop: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b ld1 {v16.4s},[x3],#16 // load key schedule... aesd v0.16b,v17.16b aesimc v0.16b,v0.16b ld1 {v17.4s},[x3],#16 // load key schedule... subs w6,w6,#2 // bias b.gt Lxts_dec_round_loop Lxts_128_dec: ld1 {v18.4s,v19.4s},[x3],#32 // load key schedule... aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v0.16b,v17.16b aesimc v0.16b,v0.16b ld1 {v20.4s,v21.4s},[x3],#32 // load key schedule... aesd v0.16b,v18.16b aesimc v0.16b,v0.16b aesd v0.16b,v19.16b aesimc v0.16b,v0.16b ld1 {v22.4s,v23.4s},[x3],#32 // load key schedule... aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b ld1 {v7.4s},[x3] aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v0.16b,v23.16b eor v0.16b,v0.16b,v7.16b eor v0.16b,v6.16b,v0.16b st1 {v0.16b},[x1] b Lxts_dec_final_abort Lxts_dec_big_size: stp x19,x20,[sp,#-64]! 
stp x21,x22,[sp,#48] stp d8,d9,[sp,#32] stp d10,d11,[sp,#16] and x21,x2,#0xf and x2,x2,#-16 subs x2,x2,#16 mov x8,#16 b.lo Lxts_dec_abort // Encrypt the iv with key2, as the first XEX iv ldr w6,[x4,#240] ld1 {v0.16b},[x4],#16 ld1 {v6.16b},[x5] sub w6,w6,#2 ld1 {v1.16b},[x4],#16 Loop_dec_iv_enc: aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4],#16 subs w6,w6,#2 aese v6.16b,v1.16b aesmc v6.16b,v6.16b ld1 {v1.4s},[x4],#16 b.gt Loop_dec_iv_enc aese v6.16b,v0.16b aesmc v6.16b,v6.16b ld1 {v0.4s},[x4] aese v6.16b,v1.16b eor v6.16b,v6.16b,v0.16b // The iv for second block // x9- iv(low), x10 - iv(high) // the five ivs stored into, v6.16b,v8.16b,v9.16b,v10.16b,v11.16b fmov x9,d6 fmov x10,v6.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 ldr w5,[x3,#240] // load rounds number // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 ld1 {v16.4s,v17.4s},[x3] // load key schedule... sub w5,w5,#6 add x7,x3,x5,lsl#4 // pointer to last 7 round keys sub w5,w5,#2 ld1 {v18.4s,v19.4s},[x7],#32 // load key schedule... ld1 {v20.4s,v21.4s},[x7],#32 ld1 {v22.4s,v23.4s},[x7],#32 ld1 {v7.4s},[x7] // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d10,x9 fmov v10.d[1],x10 add x7,x3,#32 mov w6,w5 b Lxts_dec // Decryption .align 5 Lxts_dec: tst x21,#0xf b.eq Lxts_dec_begin subs x2,x2,#16 csel x8,xzr,x8,eq ld1 {v0.16b},[x0],#16 b.lo Lxts_done sub x0,x0,#16 Lxts_dec_begin: ld1 {v0.16b},[x0],x8 subs x2,x2,#32 // bias add w6,w5,#2 orr v3.16b,v0.16b,v0.16b orr v1.16b,v0.16b,v0.16b orr v28.16b,v0.16b,v0.16b ld1 {v24.16b},[x0],#16 orr v27.16b,v24.16b,v24.16b orr v29.16b,v24.16b,v24.16b b.lo Lxts_inner_dec_tail eor v0.16b,v0.16b,v6.16b // before decryt, xor with iv eor v24.16b,v24.16b,v8.16b orr v1.16b,v24.16b,v24.16b ld1 {v24.16b},[x0],#16 orr v2.16b,v0.16b,v0.16b orr v3.16b,v1.16b,v1.16b eor v27.16b,v24.16b,v9.16b // third block xox with third iv eor v24.16b,v24.16b,v9.16b cmp x2,#32 b.lo Lxts_outer_dec_tail ld1 {v25.16b},[x0],#16 // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v26.16b},[x0],#16 eor v25.16b,v25.16b,v10.16b // the fourth block eor v26.16b,v26.16b,v11.16b sub x2,x2,#32 // bias mov w6,w5 b Loop5x_xts_dec .align 4 Loop5x_xts_dec: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v25.16b,v16.16b aesimc v25.16b,v25.16b aesd v26.16b,v16.16b aesimc v26.16b,v26.16b ld1 {v16.4s},[x7],#16 // load key schedule... subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v25.16b,v17.16b aesimc v25.16b,v25.16b aesd v26.16b,v17.16b aesimc v26.16b,v26.16b ld1 {v17.4s},[x7],#16 // load key schedule... 
b.gt Loop5x_xts_dec aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v25.16b,v16.16b aesimc v25.16b,v25.16b aesd v26.16b,v16.16b aesimc v26.16b,v26.16b subs x2,x2,#0x50 // because Lxts_dec_tail4x aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v25.16b,v17.16b aesimc v25.16b,v25.16b aesd v26.16b,v17.16b aesimc v26.16b,v26.16b csel x6,xzr,x2,gt // borrow x6, w6, "gt" is not typo mov x7,x3 aesd v0.16b,v18.16b aesimc v0.16b,v0.16b aesd v1.16b,v18.16b aesimc v1.16b,v1.16b aesd v24.16b,v18.16b aesimc v24.16b,v24.16b aesd v25.16b,v18.16b aesimc v25.16b,v25.16b aesd v26.16b,v18.16b aesimc v26.16b,v26.16b add x0,x0,x6 // x0 is adjusted in such way that // at exit from the loop v1.16b-v26.16b // are loaded with last "words" add x6,x2,#0x60 // because Lxts_dec_tail4x aesd v0.16b,v19.16b aesimc v0.16b,v0.16b aesd v1.16b,v19.16b aesimc v1.16b,v1.16b aesd v24.16b,v19.16b aesimc v24.16b,v24.16b aesd v25.16b,v19.16b aesimc v25.16b,v25.16b aesd v26.16b,v19.16b aesimc v26.16b,v26.16b aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b aesd v25.16b,v20.16b aesimc v25.16b,v25.16b aesd v26.16b,v20.16b aesimc v26.16b,v26.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b aesd v25.16b,v21.16b aesimc v25.16b,v25.16b aesd v26.16b,v21.16b aesimc v26.16b,v26.16b aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b aesd v25.16b,v22.16b aesimc v25.16b,v25.16b aesd v26.16b,v22.16b aesimc v26.16b,v26.16b eor v4.16b,v7.16b,v6.16b aesd v0.16b,v23.16b // The iv for first block of next iteration. extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v7.16b,v8.16b ld1 {v2.16b},[x0],#16 aesd v1.16b,v23.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 eor v17.16b,v7.16b,v9.16b ld1 {v3.16b},[x0],#16 aesd v24.16b,v23.16b // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 eor v30.16b,v7.16b,v10.16b ld1 {v27.16b},[x0],#16 aesd v25.16b,v23.16b // The iv for fourth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d10,x9 fmov v10.d[1],x10 eor v31.16b,v7.16b,v11.16b ld1 {v28.16b},[x0],#16 aesd v26.16b,v23.16b // The iv for fifth block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d11,x9 fmov v11.d[1],x10 ld1 {v29.16b},[x0],#16 cbz x6,Lxts_dec_tail4x ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] eor v4.16b,v4.16b,v0.16b eor v0.16b,v2.16b,v6.16b eor v5.16b,v5.16b,v1.16b eor v1.16b,v3.16b,v8.16b eor v17.16b,v17.16b,v24.16b eor v24.16b,v27.16b,v9.16b eor v30.16b,v30.16b,v25.16b eor v25.16b,v28.16b,v10.16b eor v31.16b,v31.16b,v26.16b st1 {v4.16b},[x1],#16 eor v26.16b,v29.16b,v11.16b st1 {v5.16b},[x1],#16 mov w6,w5 st1 {v17.16b},[x1],#16 ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v30.16b},[x1],#16 st1 {v31.16b},[x1],#16 b.hs Loop5x_xts_dec cmn x2,#0x10 b.ne Loop5x_dec_after // If x2(x2) equal to -0x10, the left blocks is 4. // After specially processing, utilize the five blocks processing again. 
// It will use the following IVs: v6.16b,v6.16b,v8.16b,v9.16b,v10.16b. orr v11.16b,v10.16b,v10.16b orr v10.16b,v9.16b,v9.16b orr v9.16b,v8.16b,v8.16b orr v8.16b,v6.16b,v6.16b fmov x9,d11 fmov x10,v11.d[1] eor v0.16b,v6.16b,v2.16b eor v1.16b,v8.16b,v3.16b eor v24.16b,v27.16b,v9.16b eor v25.16b,v28.16b,v10.16b eor v26.16b,v29.16b,v11.16b b.eq Loop5x_xts_dec Loop5x_dec_after: add x2,x2,#0x50 cbz x2,Lxts_done add w6,w5,#2 subs x2,x2,#0x30 b.lo Lxts_inner_dec_tail eor v0.16b,v6.16b,v27.16b eor v1.16b,v8.16b,v28.16b eor v24.16b,v29.16b,v9.16b b Lxts_outer_dec_tail .align 4 Lxts_dec_tail4x: add x0,x0,#16 tst x21,#0xf eor v5.16b,v1.16b,v4.16b st1 {v5.16b},[x1],#16 eor v17.16b,v24.16b,v17.16b st1 {v17.16b},[x1],#16 eor v30.16b,v25.16b,v30.16b eor v31.16b,v26.16b,v31.16b st1 {v30.16b,v31.16b},[x1],#32 b.eq Lxts_dec_abort ld1 {v0.4s},[x0],#16 b Lxts_done .align 4 Lxts_outer_dec_tail: aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt Lxts_outer_dec_tail aesd v0.16b,v16.16b aesimc v0.16b,v0.16b aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b eor v4.16b,v6.16b,v7.16b subs x2,x2,#0x30 // The iv for first block fmov x9,d9 fmov x10,v9.d[1] mov w19,#0x87 extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d6,x9 fmov v6.d[1],x10 eor v5.16b,v8.16b,v7.16b csel x6,x2,x6,lo // x6, w6, is zero at this point aesd v0.16b,v17.16b aesimc v0.16b,v0.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b eor v17.16b,v9.16b,v7.16b // The iv for second block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d8,x9 fmov v8.d[1],x10 add x6,x6,#0x20 add x0,x0,x6 // x0 is adjusted to the last data mov x7,x3 // The iv for third block extr x22,x10,x10,#32 extr x10,x10,x9,#63 and w11,w19,w22,asr #31 eor x9,x11,x9,lsl #1 fmov d9,x9 fmov v9.d[1],x10 aesd v0.16b,v20.16b aesimc v0.16b,v0.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b aesd v0.16b,v21.16b aesimc v0.16b,v0.16b aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b aesd v0.16b,v22.16b aesimc v0.16b,v0.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b ld1 {v27.16b},[x0],#16 aesd v0.16b,v23.16b aesd v1.16b,v23.16b aesd v24.16b,v23.16b ld1 {v16.4s},[x7],#16 // re-pre-load rndkey[0] add w6,w5,#2 eor v4.16b,v4.16b,v0.16b eor v5.16b,v5.16b,v1.16b eor v24.16b,v24.16b,v17.16b ld1 {v17.4s},[x7],#16 // re-pre-load rndkey[1] st1 {v4.16b},[x1],#16 st1 {v5.16b},[x1],#16 st1 {v24.16b},[x1],#16 cmn x2,#0x30 add x2,x2,#0x30 b.eq Lxts_done sub x2,x2,#0x30 orr v28.16b,v3.16b,v3.16b orr v29.16b,v27.16b,v27.16b nop Lxts_inner_dec_tail: // x2 == -0x10 means two blocks left. 
cmn x2,#0x10 eor v1.16b,v28.16b,v6.16b eor v24.16b,v29.16b,v8.16b b.eq Lxts_dec_tail_loop eor v24.16b,v29.16b,v6.16b Lxts_dec_tail_loop: aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b ld1 {v16.4s},[x7],#16 subs w6,w6,#2 aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b ld1 {v17.4s},[x7],#16 b.gt Lxts_dec_tail_loop aesd v1.16b,v16.16b aesimc v1.16b,v1.16b aesd v24.16b,v16.16b aesimc v24.16b,v24.16b aesd v1.16b,v17.16b aesimc v1.16b,v1.16b aesd v24.16b,v17.16b aesimc v24.16b,v24.16b aesd v1.16b,v20.16b aesimc v1.16b,v1.16b aesd v24.16b,v20.16b aesimc v24.16b,v24.16b cmn x2,#0x20 aesd v1.16b,v21.16b aesimc v1.16b,v1.16b aesd v24.16b,v21.16b aesimc v24.16b,v24.16b eor v5.16b,v6.16b,v7.16b aesd v1.16b,v22.16b aesimc v1.16b,v1.16b aesd v24.16b,v22.16b aesimc v24.16b,v24.16b eor v17.16b,v8.16b,v7.16b aesd v1.16b,v23.16b aesd v24.16b,v23.16b b.eq Lxts_dec_one eor v5.16b,v5.16b,v1.16b eor v17.16b,v17.16b,v24.16b orr v6.16b,v9.16b,v9.16b orr v8.16b,v10.16b,v10.16b st1 {v5.16b},[x1],#16 st1 {v17.16b},[x1],#16 add x2,x2,#16 b Lxts_done Lxts_dec_one: eor v5.16b,v5.16b,v24.16b orr v6.16b,v8.16b,v8.16b orr v8.16b,v9.16b,v9.16b st1 {v5.16b},[x1],#16 add x2,x2,#32 Lxts_done: tst x21,#0xf b.eq Lxts_dec_abort // Processing the last two blocks with cipher stealing. mov x7,x3 cbnz x2,Lxts_dec_1st_done ld1 {v0.4s},[x0],#16 // Decrypt the last secod block to get the last plain text block Lxts_dec_1st_done: eor v26.16b,v0.16b,v8.16b ldr w6,[x3,#240] ld1 {v0.4s},[x3],#16 sub w6,w6,#2 ld1 {v1.4s},[x3],#16 Loop_final_2nd_dec: aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x3],#16 // load key schedule... subs w6,w6,#2 aesd v26.16b,v1.16b aesimc v26.16b,v26.16b ld1 {v1.4s},[x3],#16 // load key schedule... b.gt Loop_final_2nd_dec aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x3] aesd v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v8.16b st1 {v26.16b},[x1] mov x20,x0 add x13,x1,#16 // Composite the tailcnt "16 byte not aligned block" into the last second plain blocks // to get the last encrypted block. .composite_dec_loop: subs x21,x21,#1 ldrb w15,[x1,x21] ldrb w14,[x20,x21] strb w15,[x13,x21] strb w14,[x1,x21] b.gt .composite_dec_loop Lxts_dec_load_done: ld1 {v26.16b},[x1] eor v26.16b,v26.16b,v6.16b // Decrypt the composite block to get the last second plain text block ldr w6,[x7,#240] ld1 {v0.16b},[x7],#16 sub w6,w6,#2 ld1 {v1.16b},[x7],#16 Loop_final_dec: aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x7],#16 // load key schedule... subs w6,w6,#2 aesd v26.16b,v1.16b aesimc v26.16b,v26.16b ld1 {v1.4s},[x7],#16 // load key schedule... b.gt Loop_final_dec aesd v26.16b,v0.16b aesimc v26.16b,v26.16b ld1 {v0.4s},[x7] aesd v26.16b,v1.16b eor v26.16b,v26.16b,v0.16b eor v26.16b,v26.16b,v6.16b st1 {v26.16b},[x1] Lxts_dec_abort: ldp x21,x22,[sp,#48] ldp d8,d9,[sp,#32] ldp d10,d11,[sp,#16] ldp x19,x20,[sp],#64 Lxts_dec_final_abort: ret #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
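The XTS paths above derive each block's tweak by repeatedly multiplying the encrypted IV by x in GF(2^128): the recurring extr/and/eor sequence on x9/x10 with w19 = 0x87 shifts the 128-bit tweak left by one bit and folds the constant 0x87 back into the low half whenever the top bit falls out. A small C equivalent of that update is sketched below; the type and function names are illustrative, not part of aws-lc. Each block is then processed as C_i = E_K1(P_i xor T_i) xor T_i, five blocks at a time, with cipher stealing for a trailing partial block.

/* Illustrative C model of the XTS tweak update performed by the assembly's
 * extr/and/eor sequences: T <- T * x in GF(2^128) with reduction constant
 * 0x87, the tweak held as two little-endian 64-bit halves (low half in x9,
 * high half in x10 in the code above). */
#include <stdint.h>

typedef struct { uint64_t lo, hi; } xts_tweak_t;  /* illustrative type */

static xts_tweak_t xts_tweak_times_x(xts_tweak_t t) {
    /* If the top bit of the 128-bit tweak is set, xor 0x87 into the low
     * half after the shift; this mirrors "and w11,w19,w22,asr#31". */
    uint64_t carry = (uint64_t)((int64_t)t.hi >> 63) & 0x87;
    xts_tweak_t out;
    out.hi = (t.hi << 1) | (t.lo >> 63);  /* extr x10,x10,x9,#63 */
    out.lo = (t.lo << 1) ^ carry;         /* eor x9,x11,x9,lsl#1 */
    return out;
}

Successive tweaks are obtained by applying this update once per 16-byte block; the five-block loops above simply precompute the next five tweaks (kept in v6 and v8-v11) before each iteration.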
marvin-hansen/iggy-streaming-system
34,155
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/sha256-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) // Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved. // // Licensed under the OpenSSL license (the "License"). You may not use // this file except in compliance with the License. You can obtain a copy // in the file LICENSE in the source distribution or at // https://www.openssl.org/source/license.html // ==================================================================== // Written by Andy Polyakov <appro@openssl.org> for the OpenSSL // project. The module is, however, dual licensed under OpenSSL and // CRYPTOGAMS licenses depending on where you obtain it. For further // details see http://www.openssl.org/~appro/cryptogams/. // // Permission to use under GPLv2 terms is granted. // ==================================================================== // // SHA256/512 for ARMv8. // // Performance in cycles per processed byte and improvement coefficient // over code generated with "default" compiler: // // SHA256-hw SHA256(*) SHA512 // Apple A7 1.97 10.5 (+33%) 6.73 (-1%(**)) // Cortex-A53 2.38 15.5 (+115%) 10.0 (+150%(***)) // Cortex-A57 2.31 11.6 (+86%) 7.51 (+260%(***)) // Denver 2.01 10.5 (+26%) 6.70 (+8%) // X-Gene 20.0 (+100%) 12.8 (+300%(***)) // Mongoose 2.36 13.0 (+50%) 8.36 (+33%) // Kryo 1.92 17.4 (+30%) 11.2 (+8%) // // (*) Software SHA256 results are of lesser relevance, presented // mostly for informational purposes. // (**) The result is a trade-off: it's possible to improve it by // 10% (or by 1 cycle per round), but at the cost of 20% loss // on Cortex-A53 (or by 4 cycles per round). // (***) Super-impressive coefficients over gcc-generated code are // indication of some compiler "pathology", most notably code // generated with -mgeneral-regs-only is significantly faster // and the gap is only 40-90%. #ifndef __KERNEL__ # include <openssl/arm_arch.h> #endif .text .globl _sha256_block_data_order_nohw .private_extern _sha256_block_data_order_nohw .align 6 _sha256_block_data_order_nohw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#4*4 ldp w20,w21,[x0] // load context ldp w22,w23,[x0,#2*4] ldp w24,w25,[x0,#4*4] add x2,x1,x2,lsl#6 // end of input ldp w26,w27,[x0,#6*4] adrp x30,LK256@PAGE add x30,x30,LK256@PAGEOFF stp x0,x2,[x29,#96] Loop: ldp w3,w4,[x1],#2*4 ldr w19,[x30],#4 // *K++ eor w28,w21,w22 // magic seed str x1,[x29,#112] #ifndef __AARCH64EB__ rev w3,w3 // 0 #endif ror w16,w24,#6 add w27,w27,w19 // h+=K[i] eor w6,w24,w24,ror#14 and w17,w25,w24 bic w19,w26,w24 add w27,w27,w3 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w6,ror#11 // Sigma1(e) ror w6,w20,#2 add w27,w27,w17 // h+=Ch(e,f,g) eor w17,w20,w20,ror#9 add w27,w27,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w23,w23,w27 // d+=h eor w28,w28,w21 // Maj(a,b,c) eor w17,w6,w17,ror#13 // Sigma0(a) add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w27,w27,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w4,w4 // 1 #endif ldp w5,w6,[x1],#2*4 add w27,w27,w17 // h+=Sigma0(a) ror w16,w23,#6 add w26,w26,w28 // h+=K[i] eor w7,w23,w23,ror#14 and w17,w24,w23 bic w28,w25,w23 add w26,w26,w4 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w7,ror#11 // Sigma1(e) ror w7,w27,#2 add w26,w26,w17 // h+=Ch(e,f,g) eor w17,w27,w27,ror#9 add w26,w26,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w22,w22,w26 // d+=h eor w19,w19,w20 // Maj(a,b,c) eor w17,w7,w17,ror#13 // Sigma0(a) add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w26,w26,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w5,w5 // 2 #endif add w26,w26,w17 // h+=Sigma0(a) ror w16,w22,#6 add w25,w25,w19 // h+=K[i] eor w8,w22,w22,ror#14 and w17,w23,w22 bic w19,w24,w22 add w25,w25,w5 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w8,ror#11 // Sigma1(e) ror w8,w26,#2 add w25,w25,w17 // h+=Ch(e,f,g) eor w17,w26,w26,ror#9 add w25,w25,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w21,w21,w25 // d+=h eor w28,w28,w27 // Maj(a,b,c) eor w17,w8,w17,ror#13 // Sigma0(a) add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w25,w25,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w6,w6 // 3 #endif ldp w7,w8,[x1],#2*4 add w25,w25,w17 // h+=Sigma0(a) ror w16,w21,#6 add w24,w24,w28 // h+=K[i] eor w9,w21,w21,ror#14 and w17,w22,w21 bic w28,w23,w21 add w24,w24,w6 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w9,ror#11 // Sigma1(e) ror w9,w25,#2 add w24,w24,w17 // h+=Ch(e,f,g) eor w17,w25,w25,ror#9 add w24,w24,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w20,w20,w24 // d+=h eor w19,w19,w26 // Maj(a,b,c) eor w17,w9,w17,ror#13 // Sigma0(a) add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w24,w24,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w7,w7 // 4 #endif add w24,w24,w17 // h+=Sigma0(a) ror w16,w20,#6 add w23,w23,w19 // h+=K[i] eor w10,w20,w20,ror#14 and w17,w21,w20 bic w19,w22,w20 add w23,w23,w7 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w10,ror#11 // Sigma1(e) ror w10,w24,#2 add w23,w23,w17 // h+=Ch(e,f,g) eor w17,w24,w24,ror#9 add w23,w23,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w27,w27,w23 // d+=h eor w28,w28,w25 // Maj(a,b,c) eor w17,w10,w17,ror#13 // Sigma0(a) add w23,w23,w28 // h+=Maj(a,b,c) ldr 
w28,[x30],#4 // *K++, w19 in next round //add w23,w23,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w8,w8 // 5 #endif ldp w9,w10,[x1],#2*4 add w23,w23,w17 // h+=Sigma0(a) ror w16,w27,#6 add w22,w22,w28 // h+=K[i] eor w11,w27,w27,ror#14 and w17,w20,w27 bic w28,w21,w27 add w22,w22,w8 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w11,ror#11 // Sigma1(e) ror w11,w23,#2 add w22,w22,w17 // h+=Ch(e,f,g) eor w17,w23,w23,ror#9 add w22,w22,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w26,w26,w22 // d+=h eor w19,w19,w24 // Maj(a,b,c) eor w17,w11,w17,ror#13 // Sigma0(a) add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w22,w22,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w9,w9 // 6 #endif add w22,w22,w17 // h+=Sigma0(a) ror w16,w26,#6 add w21,w21,w19 // h+=K[i] eor w12,w26,w26,ror#14 and w17,w27,w26 bic w19,w20,w26 add w21,w21,w9 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w12,ror#11 // Sigma1(e) ror w12,w22,#2 add w21,w21,w17 // h+=Ch(e,f,g) eor w17,w22,w22,ror#9 add w21,w21,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w25,w25,w21 // d+=h eor w28,w28,w23 // Maj(a,b,c) eor w17,w12,w17,ror#13 // Sigma0(a) add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w21,w21,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w10,w10 // 7 #endif ldp w11,w12,[x1],#2*4 add w21,w21,w17 // h+=Sigma0(a) ror w16,w25,#6 add w20,w20,w28 // h+=K[i] eor w13,w25,w25,ror#14 and w17,w26,w25 bic w28,w27,w25 add w20,w20,w10 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w13,ror#11 // Sigma1(e) ror w13,w21,#2 add w20,w20,w17 // h+=Ch(e,f,g) eor w17,w21,w21,ror#9 add w20,w20,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w24,w24,w20 // d+=h eor w19,w19,w22 // Maj(a,b,c) eor w17,w13,w17,ror#13 // Sigma0(a) add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w20,w20,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w11,w11 // 8 #endif add w20,w20,w17 // h+=Sigma0(a) ror w16,w24,#6 add w27,w27,w19 // h+=K[i] eor w14,w24,w24,ror#14 and w17,w25,w24 bic w19,w26,w24 add w27,w27,w11 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w14,ror#11 // Sigma1(e) ror w14,w20,#2 add w27,w27,w17 // h+=Ch(e,f,g) eor w17,w20,w20,ror#9 add w27,w27,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w23,w23,w27 // d+=h eor w28,w28,w21 // Maj(a,b,c) eor w17,w14,w17,ror#13 // Sigma0(a) add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w27,w27,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w12,w12 // 9 #endif ldp w13,w14,[x1],#2*4 add w27,w27,w17 // h+=Sigma0(a) ror w16,w23,#6 add w26,w26,w28 // h+=K[i] eor w15,w23,w23,ror#14 and w17,w24,w23 bic w28,w25,w23 add w26,w26,w12 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w15,ror#11 // Sigma1(e) ror w15,w27,#2 add w26,w26,w17 // h+=Ch(e,f,g) eor w17,w27,w27,ror#9 add w26,w26,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w22,w22,w26 // d+=h eor w19,w19,w20 // Maj(a,b,c) eor w17,w15,w17,ror#13 // Sigma0(a) add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w26,w26,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w13,w13 // 10 #endif add w26,w26,w17 // h+=Sigma0(a) ror w16,w22,#6 add w25,w25,w19 // h+=K[i] eor w0,w22,w22,ror#14 and w17,w23,w22 bic w19,w24,w22 add w25,w25,w13 // h+=X[i] orr 
w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w0,ror#11 // Sigma1(e) ror w0,w26,#2 add w25,w25,w17 // h+=Ch(e,f,g) eor w17,w26,w26,ror#9 add w25,w25,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w21,w21,w25 // d+=h eor w28,w28,w27 // Maj(a,b,c) eor w17,w0,w17,ror#13 // Sigma0(a) add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w25,w25,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w14,w14 // 11 #endif ldp w15,w0,[x1],#2*4 add w25,w25,w17 // h+=Sigma0(a) str w6,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] eor w6,w21,w21,ror#14 and w17,w22,w21 bic w28,w23,w21 add w24,w24,w14 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w6,ror#11 // Sigma1(e) ror w6,w25,#2 add w24,w24,w17 // h+=Ch(e,f,g) eor w17,w25,w25,ror#9 add w24,w24,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w20,w20,w24 // d+=h eor w19,w19,w26 // Maj(a,b,c) eor w17,w6,w17,ror#13 // Sigma0(a) add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w24,w24,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w15,w15 // 12 #endif add w24,w24,w17 // h+=Sigma0(a) str w7,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] eor w7,w20,w20,ror#14 and w17,w21,w20 bic w19,w22,w20 add w23,w23,w15 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w7,ror#11 // Sigma1(e) ror w7,w24,#2 add w23,w23,w17 // h+=Ch(e,f,g) eor w17,w24,w24,ror#9 add w23,w23,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w27,w27,w23 // d+=h eor w28,w28,w25 // Maj(a,b,c) eor w17,w7,w17,ror#13 // Sigma0(a) add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w23,w23,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w0,w0 // 13 #endif ldp w1,w2,[x1] add w23,w23,w17 // h+=Sigma0(a) str w8,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] eor w8,w27,w27,ror#14 and w17,w20,w27 bic w28,w21,w27 add w22,w22,w0 // h+=X[i] orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w8,ror#11 // Sigma1(e) ror w8,w23,#2 add w22,w22,w17 // h+=Ch(e,f,g) eor w17,w23,w23,ror#9 add w22,w22,w16 // h+=Sigma1(e) and w19,w19,w28 // (b^c)&=(a^b) add w26,w26,w22 // d+=h eor w19,w19,w24 // Maj(a,b,c) eor w17,w8,w17,ror#13 // Sigma0(a) add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round //add w22,w22,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w1,w1 // 14 #endif ldr w6,[sp,#12] add w22,w22,w17 // h+=Sigma0(a) str w9,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] eor w9,w26,w26,ror#14 and w17,w27,w26 bic w19,w20,w26 add w21,w21,w1 // h+=X[i] orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w9,ror#11 // Sigma1(e) ror w9,w22,#2 add w21,w21,w17 // h+=Ch(e,f,g) eor w17,w22,w22,ror#9 add w21,w21,w16 // h+=Sigma1(e) and w28,w28,w19 // (b^c)&=(a^b) add w25,w25,w21 // d+=h eor w28,w28,w23 // Maj(a,b,c) eor w17,w9,w17,ror#13 // Sigma0(a) add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round //add w21,w21,w17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev w2,w2 // 15 #endif ldr w7,[sp,#0] add w21,w21,w17 // h+=Sigma0(a) str w10,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w9,w4,#7 and w17,w26,w25 ror w8,w1,#17 bic w28,w27,w25 ror w10,w21,#2 add w20,w20,w2 // h+=X[i] eor w16,w16,w25,ror#11 eor w9,w9,w4,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w10,w10,w21,ror#13 add w20,w20,w17 // 
h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w8,w8,w1,ror#19 eor w9,w9,w4,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w10,w21,ror#22 // Sigma0(a) eor w8,w8,w1,lsr#10 // sigma1(X[i+14]) add w3,w3,w12 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w3,w3,w9 add w20,w20,w17 // h+=Sigma0(a) add w3,w3,w8 Loop_16_xx: ldr w8,[sp,#4] str w11,[sp,#0] ror w16,w24,#6 add w27,w27,w19 // h+=K[i] ror w10,w5,#7 and w17,w25,w24 ror w9,w2,#17 bic w19,w26,w24 ror w11,w20,#2 add w27,w27,w3 // h+=X[i] eor w16,w16,w24,ror#11 eor w10,w10,w5,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w24,ror#25 // Sigma1(e) eor w11,w11,w20,ror#13 add w27,w27,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w9,w9,w2,ror#19 eor w10,w10,w5,lsr#3 // sigma0(X[i+1]) add w27,w27,w16 // h+=Sigma1(e) eor w28,w28,w21 // Maj(a,b,c) eor w17,w11,w20,ror#22 // Sigma0(a) eor w9,w9,w2,lsr#10 // sigma1(X[i+14]) add w4,w4,w13 add w23,w23,w27 // d+=h add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w4,w4,w10 add w27,w27,w17 // h+=Sigma0(a) add w4,w4,w9 ldr w9,[sp,#8] str w12,[sp,#4] ror w16,w23,#6 add w26,w26,w28 // h+=K[i] ror w11,w6,#7 and w17,w24,w23 ror w10,w3,#17 bic w28,w25,w23 ror w12,w27,#2 add w26,w26,w4 // h+=X[i] eor w16,w16,w23,ror#11 eor w11,w11,w6,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w23,ror#25 // Sigma1(e) eor w12,w12,w27,ror#13 add w26,w26,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w10,w10,w3,ror#19 eor w11,w11,w6,lsr#3 // sigma0(X[i+1]) add w26,w26,w16 // h+=Sigma1(e) eor w19,w19,w20 // Maj(a,b,c) eor w17,w12,w27,ror#22 // Sigma0(a) eor w10,w10,w3,lsr#10 // sigma1(X[i+14]) add w5,w5,w14 add w22,w22,w26 // d+=h add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w5,w5,w11 add w26,w26,w17 // h+=Sigma0(a) add w5,w5,w10 ldr w10,[sp,#12] str w13,[sp,#8] ror w16,w22,#6 add w25,w25,w19 // h+=K[i] ror w12,w7,#7 and w17,w23,w22 ror w11,w4,#17 bic w19,w24,w22 ror w13,w26,#2 add w25,w25,w5 // h+=X[i] eor w16,w16,w22,ror#11 eor w12,w12,w7,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w22,ror#25 // Sigma1(e) eor w13,w13,w26,ror#13 add w25,w25,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w11,w11,w4,ror#19 eor w12,w12,w7,lsr#3 // sigma0(X[i+1]) add w25,w25,w16 // h+=Sigma1(e) eor w28,w28,w27 // Maj(a,b,c) eor w17,w13,w26,ror#22 // Sigma0(a) eor w11,w11,w4,lsr#10 // sigma1(X[i+14]) add w6,w6,w15 add w21,w21,w25 // d+=h add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w6,w6,w12 add w25,w25,w17 // h+=Sigma0(a) add w6,w6,w11 ldr w11,[sp,#0] str w14,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] ror w13,w8,#7 and w17,w22,w21 ror w12,w5,#17 bic w28,w23,w21 ror w14,w25,#2 add w24,w24,w6 // h+=X[i] eor w16,w16,w21,ror#11 eor w13,w13,w8,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w21,ror#25 // Sigma1(e) eor w14,w14,w25,ror#13 add w24,w24,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w12,w12,w5,ror#19 eor w13,w13,w8,lsr#3 // sigma0(X[i+1]) add w24,w24,w16 // h+=Sigma1(e) eor w19,w19,w26 // Maj(a,b,c) eor w17,w14,w25,ror#22 // Sigma0(a) eor w12,w12,w5,lsr#10 // sigma1(X[i+14]) add w7,w7,w0 add w20,w20,w24 // d+=h add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w7,w7,w13 add w24,w24,w17 // 
h+=Sigma0(a) add w7,w7,w12 ldr w12,[sp,#4] str w15,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] ror w14,w9,#7 and w17,w21,w20 ror w13,w6,#17 bic w19,w22,w20 ror w15,w24,#2 add w23,w23,w7 // h+=X[i] eor w16,w16,w20,ror#11 eor w14,w14,w9,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w20,ror#25 // Sigma1(e) eor w15,w15,w24,ror#13 add w23,w23,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w13,w13,w6,ror#19 eor w14,w14,w9,lsr#3 // sigma0(X[i+1]) add w23,w23,w16 // h+=Sigma1(e) eor w28,w28,w25 // Maj(a,b,c) eor w17,w15,w24,ror#22 // Sigma0(a) eor w13,w13,w6,lsr#10 // sigma1(X[i+14]) add w8,w8,w1 add w27,w27,w23 // d+=h add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w8,w8,w14 add w23,w23,w17 // h+=Sigma0(a) add w8,w8,w13 ldr w13,[sp,#8] str w0,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] ror w15,w10,#7 and w17,w20,w27 ror w14,w7,#17 bic w28,w21,w27 ror w0,w23,#2 add w22,w22,w8 // h+=X[i] eor w16,w16,w27,ror#11 eor w15,w15,w10,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w27,ror#25 // Sigma1(e) eor w0,w0,w23,ror#13 add w22,w22,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w14,w14,w7,ror#19 eor w15,w15,w10,lsr#3 // sigma0(X[i+1]) add w22,w22,w16 // h+=Sigma1(e) eor w19,w19,w24 // Maj(a,b,c) eor w17,w0,w23,ror#22 // Sigma0(a) eor w14,w14,w7,lsr#10 // sigma1(X[i+14]) add w9,w9,w2 add w26,w26,w22 // d+=h add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w9,w9,w15 add w22,w22,w17 // h+=Sigma0(a) add w9,w9,w14 ldr w14,[sp,#12] str w1,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] ror w0,w11,#7 and w17,w27,w26 ror w15,w8,#17 bic w19,w20,w26 ror w1,w22,#2 add w21,w21,w9 // h+=X[i] eor w16,w16,w26,ror#11 eor w0,w0,w11,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w26,ror#25 // Sigma1(e) eor w1,w1,w22,ror#13 add w21,w21,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w15,w15,w8,ror#19 eor w0,w0,w11,lsr#3 // sigma0(X[i+1]) add w21,w21,w16 // h+=Sigma1(e) eor w28,w28,w23 // Maj(a,b,c) eor w17,w1,w22,ror#22 // Sigma0(a) eor w15,w15,w8,lsr#10 // sigma1(X[i+14]) add w10,w10,w3 add w25,w25,w21 // d+=h add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w10,w10,w0 add w21,w21,w17 // h+=Sigma0(a) add w10,w10,w15 ldr w15,[sp,#0] str w2,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w1,w12,#7 and w17,w26,w25 ror w0,w9,#17 bic w28,w27,w25 ror w2,w21,#2 add w20,w20,w10 // h+=X[i] eor w16,w16,w25,ror#11 eor w1,w1,w12,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w2,w2,w21,ror#13 add w20,w20,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w0,w0,w9,ror#19 eor w1,w1,w12,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w2,w21,ror#22 // Sigma0(a) eor w0,w0,w9,lsr#10 // sigma1(X[i+14]) add w11,w11,w4 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w11,w11,w1 add w20,w20,w17 // h+=Sigma0(a) add w11,w11,w0 ldr w0,[sp,#4] str w3,[sp,#0] ror w16,w24,#6 add w27,w27,w19 // h+=K[i] ror w2,w13,#7 and w17,w25,w24 ror w1,w10,#17 bic w19,w26,w24 ror w3,w20,#2 add w27,w27,w11 // h+=X[i] eor w16,w16,w24,ror#11 eor w2,w2,w13,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w20,w21 // a^b, b^c in next round eor w16,w16,w24,ror#25 // Sigma1(e) eor w3,w3,w20,ror#13 add w27,w27,w17 // h+=Ch(e,f,g) 
and w28,w28,w19 // (b^c)&=(a^b) eor w1,w1,w10,ror#19 eor w2,w2,w13,lsr#3 // sigma0(X[i+1]) add w27,w27,w16 // h+=Sigma1(e) eor w28,w28,w21 // Maj(a,b,c) eor w17,w3,w20,ror#22 // Sigma0(a) eor w1,w1,w10,lsr#10 // sigma1(X[i+14]) add w12,w12,w5 add w23,w23,w27 // d+=h add w27,w27,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w12,w12,w2 add w27,w27,w17 // h+=Sigma0(a) add w12,w12,w1 ldr w1,[sp,#8] str w4,[sp,#4] ror w16,w23,#6 add w26,w26,w28 // h+=K[i] ror w3,w14,#7 and w17,w24,w23 ror w2,w11,#17 bic w28,w25,w23 ror w4,w27,#2 add w26,w26,w12 // h+=X[i] eor w16,w16,w23,ror#11 eor w3,w3,w14,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w27,w20 // a^b, b^c in next round eor w16,w16,w23,ror#25 // Sigma1(e) eor w4,w4,w27,ror#13 add w26,w26,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w2,w2,w11,ror#19 eor w3,w3,w14,lsr#3 // sigma0(X[i+1]) add w26,w26,w16 // h+=Sigma1(e) eor w19,w19,w20 // Maj(a,b,c) eor w17,w4,w27,ror#22 // Sigma0(a) eor w2,w2,w11,lsr#10 // sigma1(X[i+14]) add w13,w13,w6 add w22,w22,w26 // d+=h add w26,w26,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w13,w13,w3 add w26,w26,w17 // h+=Sigma0(a) add w13,w13,w2 ldr w2,[sp,#12] str w5,[sp,#8] ror w16,w22,#6 add w25,w25,w19 // h+=K[i] ror w4,w15,#7 and w17,w23,w22 ror w3,w12,#17 bic w19,w24,w22 ror w5,w26,#2 add w25,w25,w13 // h+=X[i] eor w16,w16,w22,ror#11 eor w4,w4,w15,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w26,w27 // a^b, b^c in next round eor w16,w16,w22,ror#25 // Sigma1(e) eor w5,w5,w26,ror#13 add w25,w25,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w3,w3,w12,ror#19 eor w4,w4,w15,lsr#3 // sigma0(X[i+1]) add w25,w25,w16 // h+=Sigma1(e) eor w28,w28,w27 // Maj(a,b,c) eor w17,w5,w26,ror#22 // Sigma0(a) eor w3,w3,w12,lsr#10 // sigma1(X[i+14]) add w14,w14,w7 add w21,w21,w25 // d+=h add w25,w25,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w14,w14,w4 add w25,w25,w17 // h+=Sigma0(a) add w14,w14,w3 ldr w3,[sp,#0] str w6,[sp,#12] ror w16,w21,#6 add w24,w24,w28 // h+=K[i] ror w5,w0,#7 and w17,w22,w21 ror w4,w13,#17 bic w28,w23,w21 ror w6,w25,#2 add w24,w24,w14 // h+=X[i] eor w16,w16,w21,ror#11 eor w5,w5,w0,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w25,w26 // a^b, b^c in next round eor w16,w16,w21,ror#25 // Sigma1(e) eor w6,w6,w25,ror#13 add w24,w24,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w4,w4,w13,ror#19 eor w5,w5,w0,lsr#3 // sigma0(X[i+1]) add w24,w24,w16 // h+=Sigma1(e) eor w19,w19,w26 // Maj(a,b,c) eor w17,w6,w25,ror#22 // Sigma0(a) eor w4,w4,w13,lsr#10 // sigma1(X[i+14]) add w15,w15,w8 add w20,w20,w24 // d+=h add w24,w24,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w15,w15,w5 add w24,w24,w17 // h+=Sigma0(a) add w15,w15,w4 ldr w4,[sp,#4] str w7,[sp,#0] ror w16,w20,#6 add w23,w23,w19 // h+=K[i] ror w6,w1,#7 and w17,w21,w20 ror w5,w14,#17 bic w19,w22,w20 ror w7,w24,#2 add w23,w23,w15 // h+=X[i] eor w16,w16,w20,ror#11 eor w6,w6,w1,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w24,w25 // a^b, b^c in next round eor w16,w16,w20,ror#25 // Sigma1(e) eor w7,w7,w24,ror#13 add w23,w23,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w5,w5,w14,ror#19 eor w6,w6,w1,lsr#3 // sigma0(X[i+1]) add w23,w23,w16 // h+=Sigma1(e) eor w28,w28,w25 // Maj(a,b,c) eor w17,w7,w24,ror#22 // Sigma0(a) eor w5,w5,w14,lsr#10 // sigma1(X[i+14]) add w0,w0,w9 add w27,w27,w23 // d+=h add w23,w23,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w0,w0,w6 add w23,w23,w17 // h+=Sigma0(a) add w0,w0,w5 ldr w5,[sp,#8] str 
w8,[sp,#4] ror w16,w27,#6 add w22,w22,w28 // h+=K[i] ror w7,w2,#7 and w17,w20,w27 ror w6,w15,#17 bic w28,w21,w27 ror w8,w23,#2 add w22,w22,w0 // h+=X[i] eor w16,w16,w27,ror#11 eor w7,w7,w2,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w23,w24 // a^b, b^c in next round eor w16,w16,w27,ror#25 // Sigma1(e) eor w8,w8,w23,ror#13 add w22,w22,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w6,w6,w15,ror#19 eor w7,w7,w2,lsr#3 // sigma0(X[i+1]) add w22,w22,w16 // h+=Sigma1(e) eor w19,w19,w24 // Maj(a,b,c) eor w17,w8,w23,ror#22 // Sigma0(a) eor w6,w6,w15,lsr#10 // sigma1(X[i+14]) add w1,w1,w10 add w26,w26,w22 // d+=h add w22,w22,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w1,w1,w7 add w22,w22,w17 // h+=Sigma0(a) add w1,w1,w6 ldr w6,[sp,#12] str w9,[sp,#8] ror w16,w26,#6 add w21,w21,w19 // h+=K[i] ror w8,w3,#7 and w17,w27,w26 ror w7,w0,#17 bic w19,w20,w26 ror w9,w22,#2 add w21,w21,w1 // h+=X[i] eor w16,w16,w26,ror#11 eor w8,w8,w3,ror#18 orr w17,w17,w19 // Ch(e,f,g) eor w19,w22,w23 // a^b, b^c in next round eor w16,w16,w26,ror#25 // Sigma1(e) eor w9,w9,w22,ror#13 add w21,w21,w17 // h+=Ch(e,f,g) and w28,w28,w19 // (b^c)&=(a^b) eor w7,w7,w0,ror#19 eor w8,w8,w3,lsr#3 // sigma0(X[i+1]) add w21,w21,w16 // h+=Sigma1(e) eor w28,w28,w23 // Maj(a,b,c) eor w17,w9,w22,ror#22 // Sigma0(a) eor w7,w7,w0,lsr#10 // sigma1(X[i+14]) add w2,w2,w11 add w25,w25,w21 // d+=h add w21,w21,w28 // h+=Maj(a,b,c) ldr w28,[x30],#4 // *K++, w19 in next round add w2,w2,w8 add w21,w21,w17 // h+=Sigma0(a) add w2,w2,w7 ldr w7,[sp,#0] str w10,[sp,#12] ror w16,w25,#6 add w20,w20,w28 // h+=K[i] ror w9,w4,#7 and w17,w26,w25 ror w8,w1,#17 bic w28,w27,w25 ror w10,w21,#2 add w20,w20,w2 // h+=X[i] eor w16,w16,w25,ror#11 eor w9,w9,w4,ror#18 orr w17,w17,w28 // Ch(e,f,g) eor w28,w21,w22 // a^b, b^c in next round eor w16,w16,w25,ror#25 // Sigma1(e) eor w10,w10,w21,ror#13 add w20,w20,w17 // h+=Ch(e,f,g) and w19,w19,w28 // (b^c)&=(a^b) eor w8,w8,w1,ror#19 eor w9,w9,w4,lsr#3 // sigma0(X[i+1]) add w20,w20,w16 // h+=Sigma1(e) eor w19,w19,w22 // Maj(a,b,c) eor w17,w10,w21,ror#22 // Sigma0(a) eor w8,w8,w1,lsr#10 // sigma1(X[i+14]) add w3,w3,w12 add w24,w24,w20 // d+=h add w20,w20,w19 // h+=Maj(a,b,c) ldr w19,[x30],#4 // *K++, w28 in next round add w3,w3,w9 add w20,w20,w17 // h+=Sigma0(a) add w3,w3,w8 cbnz w19,Loop_16_xx ldp x0,x2,[x29,#96] ldr x1,[x29,#112] sub x30,x30,#260 // rewind ldp w3,w4,[x0] ldp w5,w6,[x0,#2*4] add x1,x1,#14*4 // advance input pointer ldp w7,w8,[x0,#4*4] add w20,w20,w3 ldp w9,w10,[x0,#6*4] add w21,w21,w4 add w22,w22,w5 add w23,w23,w6 stp w20,w21,[x0] add w24,w24,w7 add w25,w25,w8 stp w22,w23,[x0,#2*4] add w26,w26,w9 add w27,w27,w10 cmp x1,x2 stp w24,w25,[x0,#4*4] stp w26,w27,[x0,#6*4] b.ne Loop ldp x19,x20,[x29,#16] add sp,sp,#4*4 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .section __TEXT,__const .align 6 LK256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 
.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0 //terminator .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 .text #ifndef __KERNEL__ .globl _sha256_block_data_order_hw .private_extern _sha256_block_data_order_hw .align 6 _sha256_block_data_order_hw: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#6] // kFlag_sha256_hw #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 ld1 {v0.4s,v1.4s},[x0] adrp x3,LK256@PAGE add x3,x3,LK256@PAGEOFF Loop_hw: ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 sub x2,x2,#1 ld1 {v16.4s},[x3],#16 rev32 v4.16b,v4.16b rev32 v5.16b,v5.16b rev32 v6.16b,v6.16b rev32 v7.16b,v7.16b orr v18.16b,v0.16b,v0.16b // offload orr v19.16b,v1.16b,v1.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .long 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e0760c4 //sha256su1 v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .long 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .long 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .long 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .long 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e0760c4 //sha256su1 v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .long 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .long 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .long 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s .long 0x5e2828a4 //sha256su0 v4.16b,v5.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e0760c4 //sha256su1 
v4.16b,v6.16b,v7.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s .long 0x5e2828c5 //sha256su0 v5.16b,v6.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0460e5 //sha256su1 v5.16b,v7.16b,v4.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v6.4s .long 0x5e2828e6 //sha256su0 v6.16b,v7.16b orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s .long 0x5e056086 //sha256su1 v6.16b,v4.16b,v5.16b ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v7.4s .long 0x5e282887 //sha256su0 v7.16b,v4.16b orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s .long 0x5e0660a7 //sha256su1 v7.16b,v5.16b,v6.16b ld1 {v17.4s},[x3],#16 add v16.4s,v16.4s,v4.4s orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s ld1 {v16.4s},[x3],#16 add v17.4s,v17.4s,v5.4s orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s ld1 {v17.4s},[x3] add v16.4s,v16.4s,v6.4s sub x3,x3,#64*4-16 // rewind orr v2.16b,v0.16b,v0.16b .long 0x5e104020 //sha256h v0.16b,v1.16b,v16.4s .long 0x5e105041 //sha256h2 v1.16b,v2.16b,v16.4s add v17.4s,v17.4s,v7.4s orr v2.16b,v0.16b,v0.16b .long 0x5e114020 //sha256h v0.16b,v1.16b,v17.4s .long 0x5e115041 //sha256h2 v1.16b,v2.16b,v17.4s add v0.4s,v0.4s,v18.4s add v1.4s,v1.4s,v19.4s cbnz x2,Loop_hw st1 {v0.4s,v1.4s},[x0] ldr x29,[sp],#16 ret #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
285,717
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-unroll8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include "openssl/arm_arch.h" #if __ARM_MAX_ARCH__>=8 .text .globl _aesv8_gcm_8x_enc_128 .private_extern _aesv8_gcm_8x_enc_128 .align 4 _aesv8_gcm_8x_enc_128: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#7] // kFlag_aesv8_gcm_8x_enc_128 #endif AARCH64_VALID_CALL_TARGET cbz x1, L128_enc_ret stp d8, d9, [sp, #-80]! lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 sub x5, x5, #1 //byte_len - 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - 
round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 ldr q27, [x11, #160] //load rk10 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 add x5, x5, x0 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v7.16b, v26.16b //AES 
block 8k+15 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge L128_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext cmp x0, x5 //check if we have <= 8 blocks .long 0xce006d08 //eor3 v8.16b, v8.16b, v0.16b, v27.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .long 0xce016d29 //eor3 v9.16b, v9.16b, v1.16b, v27.16b //AES block 1 - result stp q8, q9, [x2], #32 //AES block 0, 1 - store result rev32 v1.16b, v30.16b //CTR block 9 .long 0xce056dad //eor3 v13.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result add v30.4s, v30.4s, v31.4s //CTR block 9 .long 0xce026d4a //eor3 v10.16b, v10.16b, v2.16b, v27.16b //AES block 2 - result .long 0xce066dce //eor3 v14.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result .long 0xce046d8c //eor3 v12.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .long 0xce036d6b //eor3 v11.16b, v11.16b, v3.16b, v27.16b //AES block 3 - result .long 0xce076def //eor3 v15.16b, v15.16b, v7.16b,v27.16b //AES block 7 - result stp q10, q11, [x2], #32 //AES block 2, 3 - store result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 stp q12, q13, [x2], #32 //AES block 4, 5 - store result stp q14, q15, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge L128_enc_prepretail //do prepretail L128_enc_main_loop: //main loop start rev32 v5.16b, v30.16b //CTR block 8k+13 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v9.16b, v9.16b //GHASH block 8k+1 rev64 v8.16b, v8.16b //GHASH block 8k ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) rev64 v11.16b, v11.16b //GHASH block 8k+3 ldp q26, q27, [x11, #0] //load rk0, rk1 eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high rev64 v10.16b, v10.16b //GHASH block 8k+2 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h3l | h3h aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high add v30.4s, v30.4s, v31.4s //CTR block 8k+15 
aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b,v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid ldp q28, q26, [x11, #32] //load rk2, rk3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) ldp q27, q28, [x11, #64] //load rk4, rk5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h1l | h1h pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese 
v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 ldp q26, q27, [x11, #96] //load rk6, rk7 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid ldp q28, q26, [x11, #128] //load rk8, rk9 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 
aese v1.16b, v27.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 7
pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid
aese v0.16b, v27.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 7
aese v4.16b, v27.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 7
rev32 v22.16b, v30.16b //CTR block 8k+17
aese v3.16b, v27.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 7
aese v5.16b, v28.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 8
ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load plaintext
add v30.4s, v30.4s, v31.4s //CTR block 8k+17
aese v2.16b, v28.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 8
aese v1.16b, v28.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 8
aese v7.16b, v28.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 8
aese v4.16b, v28.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 8
.long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up
ldr q27, [x11, #160] //load rk10
ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment
rev32 v23.16b, v30.16b //CTR block 8k+18
add v30.4s, v30.4s, v31.4s //CTR block 8k+18
aese v3.16b, v28.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 8
aese v0.16b, v28.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 8
.long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid
aese v6.16b, v28.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 8
aese v2.16b, v26.16b //AES block 8k+10 - round 9
aese v4.16b, v26.16b //AES block 8k+12 - round 9
aese v1.16b, v26.16b //AES block 8k+9 - round 9
ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load plaintext
rev32 v25.16b, v30.16b //CTR block 8k+19
add v30.4s, v30.4s, v31.4s //CTR block 8k+19
cmp x0, x5 //LOOP CONTROL
.long 0xce046d8c //eor3 v12.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result
aese v7.16b, v26.16b //AES block 8k+15 - round 9
aese v6.16b, v26.16b //AES block 8k+14 - round 9
aese v3.16b, v26.16b //AES block 8k+11 - round 9
.long 0xce026d4a //eor3 v10.16b, v10.16b, v2.16b, v27.16b //AES block 8k+10 - result
mov v2.16b, v23.16b //CTR block 8k+18
aese v0.16b, v26.16b //AES block 8k+8 - round 9
rev32 v4.16b, v30.16b //CTR block 8k+20
add v30.4s, v30.4s, v31.4s //CTR block 8k+20
.long 0xce076def //eor3 v15.16b, v15.16b, v7.16b, v27.16b //AES block 7 - result
aese v5.16b, v26.16b //AES block 8k+13 - round 9
pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low
.long 0xce016d29 //eor3 v9.16b, v9.16b, v1.16b, v27.16b //AES block 8k+9 - result
.long 0xce036d6b //eor3 v11.16b, v11.16b, v3.16b, v27.16b //AES block 8k+11 - result
mov v3.16b, v25.16b //CTR block 8k+19
ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment
.long 0xce056dad //eor3 v13.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result
mov v1.16b, v22.16b //CTR block 8k+17
.long 0xce006d08 //eor3 v8.16b, v8.16b, v0.16b, v27.16b //AES block 8k+8 - result
mov v0.16b, v20.16b //CTR block 8k+16
stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result
stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result
.long 0xce066dce //eor3 v14.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result
stp q12, q13, [x2], #32 //AES block 8k+12, 8k+13 - store result
.long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low
stp q14, q15, [x2], #32 //AES block 8k+14, 8k+15 - store result
b.lt L128_enc_main_loop
L128_enc_prepretail: //PREPRETAIL
rev32 v5.16b, v30.16b //CTR block 8k+13
ldr q23, [x6, #144] //load h7l | h7h
ldr q25, [x6, #176] //load h8l | h8h
ext v19.16b, v19.16b, v19.16b, #8 //PRE 0
ldr q20, [x6, #96] //load h5l | h5h
ldr q22, [x6, #128] //load h6l | h6h
rev64 v8.16b, v8.16b //GHASH block 8k
rev64 v9.16b, v9.16b //GHASH block 8k+1
ldr q21, [x6, #112] //load h6k | h5k
ldr q24, [x6, #160] //load h8k | h7k
add v30.4s, v30.4s, v31.4s //CTR block 8k+13
rev64 v11.16b, v11.16b //GHASH block 8k+3
rev64 v10.16b, v10.16b //GHASH block 8k+2
eor v8.16b, v8.16b, v19.16b //PRE 1
rev32 v6.16b, v30.16b //CTR block 8k+14
pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high
pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low
pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high
rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free)
trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid
pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low
eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high
trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid
eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low
eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid
ldp q26, q27, [x11, #0] //load rk0, rk1
add v30.4s, v30.4s, v31.4s //CTR block 8k+14
pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid
pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid
rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free)
rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free)
eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid
rev32 v7.16b, v30.16b //CTR block 8k+15
rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free)
aese v2.16b, v26.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 0
pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high
pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high
aese v6.16b, v26.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 0
aese v3.16b, v26.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 0
pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low
aese v1.16b, v26.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 0
.long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high
trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid
trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid
aese v5.16b, v26.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 0
aese v7.16b, v26.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 0
eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid
aese v4.16b, v26.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 0
aese v0.16b, v26.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 0
aese v3.16b, v27.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 1
pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low
ldr q23, [x6, #48] //load h3l | h3h
ldr q25, [x6, #80] //load h4l | h4h
ldp q28, q26, [x11, #32] //load rk2, rk3
aese v5.16b, v27.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 1
pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid
.long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low
pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid
aese v1.16b, v27.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 1
aese v0.16b, v27.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 1
.long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid
ldr q21, [x6, #16] //load h2k | h1k
ldr q24, [x6, #64] //load h4k | h3k
aese v2.16b, v27.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 1
aese v4.16b, v27.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 1
aese v7.16b, v27.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 1
aese v5.16b, v28.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 2
aese v2.16b, v28.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 2
aese v3.16b, v28.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 2
aese v1.16b, v28.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 2
aese v6.16b, v27.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 1
aese v4.16b, v28.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 2
aese v5.16b, v26.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 3
aese v0.16b, v28.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 2
aese v6.16b, v28.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 2
aese v7.16b, v28.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 2
ldp q27, q28, [x11, #64] //load rk4, rk5
ldr q20, [x6] //load h1l | h1h
ldr q22, [x6, #32] //load h2l | h2h
trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid
aese v0.16b, v26.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 3
pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high
aese v6.16b, v26.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 3
aese v3.16b, v26.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 3
pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low
trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid
pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high
aese v2.16b, v26.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 3
add v30.4s, v30.4s, v31.4s //CTR block 8k+15
aese v7.16b, v26.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 3
aese v1.16b, v26.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 3
eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid
pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low
aese v4.16b, v26.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 3
pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high
trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid
pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low
trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid
aese v1.16b, v27.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 4
aese v3.16b, v27.16b
aesmc v3.16b, v3.16b //AES block 8k+11 - round 4
.long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high
.long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low
eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid
pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid
aese v1.16b, v28.16b
aesmc v1.16b, v1.16b //AES block 8k+9 - round 5
aese v6.16b, v27.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 4
aese v0.16b, v27.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 4
aese v7.16b, v27.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 4
aese v2.16b, v27.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 4
pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid
aese v4.16b, v27.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 4
aese v5.16b, v27.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 4
pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high
ldp q26, q27, [x11, #96] //load rk6, rk7
pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low
.long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid
pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid
pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid
aese v0.16b, v28.16b
aesmc v0.16b, v0.16b //AES block 8k+8 - round 5
aese v7.16b, v28.16b
aesmc v7.16b, v7.16b //AES block 8k+15 - round 5
ldr d16, [x10] //MODULO - load modulo constant
aese v2.16b, v28.16b
aesmc v2.16b, v2.16b //AES block 8k+10 - round 5
aese v4.16b, v28.16b
aesmc v4.16b, v4.16b //AES block 8k+12 - round 5
.long 0xce0b3231
//eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .long 0xce114a73 //eor3 v19.16b, v19.16b, v17.16b, v18.16b //MODULO - fold into low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 ldr q27, [x11, #160] //load rk10 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 L128_enc_tail: //TAIL sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldr q8, [x0], #16 //AES block 8k+8 - load plaintext mov v29.16b, v27.16b ldp q20, q21, [x6, #96] //load h5l | h5h .long 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h ldp q24, q25, [x6, #160] //load h8k | h7k cmp 
x5, #112 b.gt L128_enc_blocks_more_than_7 mov v7.16b, v6.16b mov v6.16b, v5.16b movi v17.8b, #0 cmp x5, #96 sub v30.4s, v30.4s, v31.4s mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b mov v2.16b, v1.16b movi v19.8b, #0 movi v18.8b, #0 b.gt L128_enc_blocks_more_than_6 mov v7.16b, v6.16b cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt L128_enc_blocks_more_than_5 cmp x5, #64 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b b.gt L128_enc_blocks_more_than_4 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt L128_enc_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v1.16b cmp x5, #32 ldr q24, [x6, #64] //load h4k | h3k b.gt L128_enc_blocks_more_than_2 cmp x5, #16 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b b.gt L128_enc_blocks_more_than_1 ldr q21, [x6, #16] //load h2k | h1k sub v30.4s, v30.4s, v31.4s b L128_enc_blocks_less_than_1 L128_enc_blocks_more_than_7: //blocks left > 7 st1 { v9.16b}, [x2], #16 //AES final-7 block - store result rev64 v8.16b, v9.16b //GHASH final-7 block ldr q9, [x0], #16 //AES final-6 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-7 block - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in .long 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low L128_enc_blocks_more_than_6: //blocks left > 6 st1 { v9.16b}, [x2], #16 //AES final-6 block - store result rev64 v8.16b, v9.16b //GHASH final-6 block ldr q9, [x0], #16 //AES final-5 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-6 block - mid .long 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high L128_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid .long 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid L128_enc_blocks_more_than_4: //blocks left > 
4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block ldr q9, [x0], #16 //AES final-3 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low .long 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid L128_enc_blocks_more_than_3: //blocks left > 3 st1 { v9.16b}, [x2], #16 //AES final-3 block - store result ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-3 block - mid ldr q24, [x6, #64] //load h4k | h3k pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low ldr q9, [x0], #16 //AES final-2 block - load plaintext eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low .long 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high L128_enc_blocks_more_than_2: //blocks left > 2 st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-1 block - load plaintext ins v27.d[0], v8.d[1] //GHASH final-2 block - mid ldr q23, [x6, #48] //load h3l | h3h movi v16.8b, #0 //supress further partial tag feed in eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid .long 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low L128_enc_blocks_more_than_1: //blocks left > 1 st1 { v9.16b}, [x2], #16 //AES final-1 block - store result ldr q22, [x6, #32] //load h2l | h2h rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid .long 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q21, [x6, #16] //load h2k | h1k ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid 
eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low L128_enc_blocks_less_than_1: //blocks left <= 1 rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block mvn x8, xzr //temp1_x = 0xffffffffffffffff cmp x1, #64 csel x13, x8, x7, lt csel x14, x7, xzr, lt mov v0.d[1], x14 mov v0.d[0], x13 //ctr0b is mask for last block and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH final block bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing st1 { v9.16b}, [x2] //store all 16B eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid eor v16.8b, v16.8b, v8.8b //GHASH final block - mid ldr q20, [x6] //load h1l | h1h pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v19.16b, v19.16b, v26.16b //GHASH final block - low ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret L128_enc_ret: mov w0, #0x0 ret .globl _aesv8_gcm_8x_dec_128 .private_extern _aesv8_gcm_8x_dec_128 .align 4 _aesv8_gcm_8x_dec_128: AARCH64_VALID_CALL_TARGET cbz x1, L128_dec_ret stp d8, d9, [sp, #-80]! 
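// Register usage in this decrypt path (mirroring the encrypt path above), as used below:
// x0 = ciphertext in, x1 = length in bits, x2 = plaintext out, x3 = current GHASH tag (Xi),
// x4 = counter block (copied to x16), x5 = AES-128 round keys rk0..rk10 (copied to x11),
// x6 = GHASH H-table (h1..h8 plus the hNk karatsuba constants); the byte count (x1 >> 3)
// is saved in x9 and returned in x0.
// Note: the .long 0xce...... words throughout are manually-encoded EOR3 (three-way XOR,
// ARMv8.2-SHA3), presumably emitted as raw words so assemblers without SHA3 support still
// accept this file; the intended instruction is spelled out in the adjacent comment.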
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 ldp q26, q27, [x11, #0] //load rk0, rk1 sub x5, x5, #1 //byte_len - 1 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b rev32 v30.16b, v0.16b //set up reversed counter aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 rev32 v7.16b, v30.16b //CTR block 7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - 
round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 add x5, x5, x0 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v0.16b, v26.16b //AES block 0 - round 9 aese v1.16b, v26.16b //AES block 1 - round 9 aese v6.16b, v26.16b //AES block 6 - round 9 ldr q27, [x11, #160] //load rk10 aese v4.16b, v26.16b //AES block 4 - round 9 aese v3.16b, v26.16b //AES block 3 - round 9 aese v2.16b, v26.16b //AES block 2 - round 9 aese v5.16b, v26.16b //AES block 5 - round 9 aese v7.16b, v26.16b //AES block 7 - round 9 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge L128_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext .long 0xce006d00 //eor3 v0.16b, v8.16b, v0.16b, v27.16b //AES block 0 - result .long 0xce016d21 //eor3 v1.16b, v9.16b, v1.16b, v27.16b //AES block 1 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load 
ciphertext rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext .long 0xce036d63 //eor3 v3.16b, v11.16b, v3.16b, v27.16b //AES block 3 - result .long 0xce026d42 //eor3 v2.16b, v10.16b, v2.16b, v27.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .long 0xce066dc6 //eor3 v6.16b, v14.16b, v6.16b, v27.16b //AES block 6 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .long 0xce046d84 //eor3 v4.16b, v12.16b, v4.16b, v27.16b //AES block 4 - result .long 0xce056da5 //eor3 v5.16b, v13.16b, v5.16b, v27.16b //AES block 5 - result stp q4, q5, [x2], #32 //AES block 4, 5 - store result .long 0xce076de7 //eor3 v7.16b, v15.16b, v7.16b, v27.16b //AES block 7 - result stp q6, q7, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 cmp x0, x5 //check if we have <= 8 blocks add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge L128_dec_prepretail //do prepretail L128_dec_main_loop: //main loop start ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev64 v12.16b, v12.16b //GHASH block 8k+4 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v7.16b, v30.16b //CTR block 8k+15 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high ldp q28, q26, [x11, #32] //load rk2, rk3 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low trn2 v10.2d, v11.2d, v10.2d 
//GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 rev64 v15.16b, v15.16b //GHASH block 8k+7 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high ldp q27, q28, [x11, #64] //load rk4, rk5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 trn2 
v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 ldr d16, [x10] //MODULO - load modulo constant .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 rev32 v20.16b, v30.16b //CTR block 8k+16 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 rev32 v22.16b, v30.16b //CTR block 8k+17 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - 
round 8 rev32 v23.16b, v30.16b //CTR block 8k+18 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 ldr q27, [x11, #160] //load rk10 aese v6.16b, v26.16b //AES block 8k+14 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 .long 0xce016d21 //eor3 v1.16b, v9.16b, v1.16b, v27.16b //AES block 8k+9 - result .long 0xce006d00 //eor3 v0.16b, v8.16b, v0.16b, v27.16b //AES block 8k+8 - result .long 0xce076de7 //eor3 v7.16b, v15.16b, v7.16b, v27.16b //AES block 8k+15 - result .long 0xce066dc6 //eor3 v6.16b, v14.16b, v6.16b, v27.16b //AES block 8k+14 - result .long 0xce026d42 //eor3 v2.16b, v10.16b, v2.16b, v27.16b //AES block 8k+10 - result stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v1.16b, v22.16b //CTR block 8k+17 .long 0xce046d84 //eor3 v4.16b, v12.16b, v4.16b, v27.16b //AES block 8k+12 - result .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low mov v0.16b, v20.16b //CTR block 8k+16 .long 0xce036d63 //eor3 v3.16b, v11.16b, v3.16b, v27.16b //AES block 8k+11 - result cmp x0, x5 //LOOP CONTROL stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result .long 0xce056da5 //eor3 v5.16b, v13.16b, v5.16b, v27.16b //AES block 8k+13 - result mov v2.16b, v23.16b //CTR block 8k+18 stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 stp q6, q7, [x2], #32 //AES block 8k+14, 8k+15 - store result mov v3.16b, v25.16b //CTR block 8k+19 b.lt L128_dec_main_loop L128_dec_prepretail: //PREPRETAIL rev64 v11.16b, v11.16b //GHASH block 8k+3 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v9.16b, v9.16b //GHASH block 8k+1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v13.16b, v13.16b //GHASH block 8k+5 rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v17.1q, v8.2d, v25.2d 
//GHASH block 8k - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low rev32 v7.16b, v30.16b //CTR block 8k+15 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldp q28, q26, [x11, #32] //load rk2, rk3 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid ldp q27, q28, [x11, #64] //load rk4, rk5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 
- round 3 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v6.16b, v26.16b aesmc 
v6.16b, v6.16b //AES block 8k+14 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldr q27, [x11, #160] //load rk10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v6.16b, v26.16b //AES block 8k+14 - round 9 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v2.16b, v26.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b //AES block 8k+13 - round 9 aese v0.16b, v26.16b //AES block 8k+8 - round 9 aese v4.16b, v26.16b //AES block 8k+12 - round 9 aese v1.16b, v26.16b //AES block 8k+9 - round 9 aese v7.16b, v26.16b //AES block 8k+15 - round 9 L128_dec_tail: //TAIL mov v29.16b, v27.16b sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process cmp x5, #112 ldp q24, q25, [x6, #160] //load h8k | h7k ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q20, q21, [x6, #96] //load h5l | h5h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h .long 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result b.gt L128_dec_blocks_more_than_7 cmp x5, #96 mov v7.16b, v6.16b movi v19.8b, #0 movi v17.8b, #0 mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b mov v2.16b, v1.16b movi v18.8b, #0 sub v30.4s, v30.4s, v31.4s b.gt L128_dec_blocks_more_than_6 cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt L128_dec_blocks_more_than_5 cmp x5, #64 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt L128_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt L128_dec_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 ldr q24, [x6, #64] //load h4k | h3k mov v6.16b, v1.16b b.gt 
L128_dec_blocks_more_than_2 cmp x5, #16 mov v7.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt L128_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b L128_dec_blocks_less_than_1 L128_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v18.d[0], v24.d[1] //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low ins v27.d[0], v8.d[1] //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-6 block - load ciphertext eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high st1 { v12.16b}, [x2], #16 //AES final-7 block - store result .long 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid L128_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-6 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low ldr q9, [x0], #16 //AES final-5 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid st1 { v12.16b}, [x2], #16 //AES final-6 block - store result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid .long 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result L128_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block ldr q9, [x0], #16 //AES final-4 block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-5 block - store result eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid .long 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high L128_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-3 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high st1 { v12.16b}, [x2], #16 //AES final-4 block - store result eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid .long 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid 
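// The L128_dec_blocks_more_than_N sections below fall straight through to one another:
// each stores the block decrypted by the previous section, folds that block's ciphertext
// into the GHASH accumulators (v17 = high, v18 = mid, v19 = low), loads and decrypts the
// next ciphertext block, and clears v16 so the running tag is only fed into the first
// block handled in the tail.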
L128_dec_blocks_more_than_3: //blocks left > 3 st1 { v12.16b}, [x2], #16 //AES final-3 block - store result rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid ldr q25, [x6, #80] //load h4l | h4h ldr q24, [x6, #64] //load h4k | h3k eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ldr q9, [x0], #16 //AES final-2 block - load ciphertext ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in .long 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid L128_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block st1 { v12.16b}, [x2], #16 //AES final-2 block - store result eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q23, [x6, #48] //load h3l | h3h movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-2 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid ldr q9, [x0], #16 //AES final-1 block - load ciphertext eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low .long 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high L128_dec_blocks_more_than_1: //blocks left > 1 st1 { v12.16b}, [x2], #16 //AES final-1 block - store result rev64 v8.16b, v9.16b //GHASH final-1 block ldr q22, [x6, #32] //load h2l | h2h eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid ldr q9, [x0], #16 //AES final block - load ciphertext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high ldr q21, [x6, #16] //load h2k | h1k ins v27.d[1], v27.d[0] //GHASH final-1 block - mid .long 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid L128_dec_blocks_less_than_1: //blocks left <= 1 and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x13, x8, x7, lt csel x14, x7, xzr, lt mov v0.d[1], x14 mov v0.d[0], x13 //ctr0b is mask for last block ldr q20, [x6] //load h1l | h1h ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH 
final block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high ins v16.d[0], v8.d[1] //GHASH final block - mid eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v16.8b, v16.8b, v8.8b //GHASH final block - mid bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid st1 { v12.16b}, [x2] //store all 16B pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant eor v19.16b, v19.16b, v26.16b //GHASH final block - low eor v14.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid ext v17.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment eor v18.16b, v18.16b, v14.16b //MODULO - karatsuba tidy up .long 0xce115652 //eor3 v18.16b, v18.16b, v17.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .long 0xce124673 //eor3 v19.16b, v19.16b, v18.16b, v17.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret L128_dec_ret: mov w0, #0x0 ret .globl _aesv8_gcm_8x_enc_192 .private_extern _aesv8_gcm_8x_enc_192 .align 4 _aesv8_gcm_8x_enc_192: AARCH64_VALID_CALL_TARGET cbz x1, L192_enc_ret stp d8, d9, [sp, #-80]! lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 sub x5, x5, #1 //byte_len - 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 add x5, x5, x0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v3.16b, 
v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v2.16b, v27.16b aesmc v2.16b, 
v2.16b //AES block 2 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 14 - round 10 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 11 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 9 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 13 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 12 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 10 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 15 - round 10 aese v6.16b, v28.16b //AES block 14 - round 11 aese v3.16b, v28.16b //AES block 11 - round 11 aese v4.16b, v28.16b //AES block 12 - round 11 aese v7.16b, v28.16b //AES block 15 - round 11 ldr q26, [x11, #192] //load rk12 aese v1.16b, v28.16b //AES block 9 - round 11 aese v5.16b, v28.16b //AES block 13 - round 11 aese v2.16b, v28.16b //AES block 10 - round 11 aese v0.16b, v28.16b //AES block 8 - round 11 b.ge L192_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext .long 0xce006908 //eor3 v8.16b, v8.16b, v0.16b, v26.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .long 0xce03696b //eor3 v11.16b, v11.16b, v3.16b, v26.16b //AES block 3 - result .long 0xce016929 //eor3 v9.16b, v9.16b, v1.16b, v26.16b //AES block 1 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .long 0xce04698c //eor3 v12.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result .long 0xce0569ad //eor3 v13.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result .long 0xce0769ef //eor3 v15.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result stp q8, q9, [x2], #32 //AES block 0, 1 - store result .long 0xce02694a //eor3 v10.16b, v10.16b, v2.16b, v26.16b //AES block 2 - result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 stp q10, q11, [x2], #32 //AES block 2, 3 - store result cmp x0, x5 //check if we have <= 8 blocks rev32 v3.16b, v30.16b //CTR block 11 
add v30.4s, v30.4s, v31.4s //CTR block 11 .long 0xce0669ce //eor3 v14.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result stp q12, q13, [x2], #32 //AES block 4, 5 - store result rev32 v4.16b, v30.16b //CTR block 12 stp q14, q15, [x2], #32 //AES block 6, 7 - store result add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge L192_enc_prepretail //do prepretail L192_enc_main_loop: //main loop start rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) ldp q26, q27, [x11, #0] //load rk0, rk1 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 
8k+15 - round 2 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h ldp q26, q27, [x11, #96] //load rk6, rk7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v3.16b, v26.16b aesmc v3.16b, 
v3.16b //AES block 8k+11 - round 6 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 ldr d16, [x10] //MODULO - load modulo constant .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid rev32 v22.16b, v30.16b //CTR block 8k+17 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 .long 0xce114e52 //eor3 v18.16b, 
v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 ldr q26, [x11, #192] //load rk12 ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v4.16b, v28.16b //AES block 8k+12 - round 11 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load plaintext ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load plaintext aese v2.16b, v28.16b //AES block 8k+10 - round 11 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 rev32 v23.16b, v30.16b //CTR block 8k+18 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v5.16b, v28.16b //AES block 8k+13 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v7.16b, v28.16b //AES block 8k+15 - round 11 aese v0.16b, v28.16b //AES block 8k+8 - round 11 .long 0xce04698c //eor3 v12.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v3.16b, v28.16b //AES block 8k+11 - round 11 aese v1.16b, v28.16b //AES block 8k+9 - round 11 rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 .long 0xce0769ef //eor3 v15.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result .long 0xce02694a //eor3 v10.16b, v10.16b, v2.16b, v26.16b //AES block 8k+10 - result .long 0xce006908 //eor3 v8.16b, v8.16b, v0.16b, v26.16b //AES block 8k+8 - result mov v2.16b, v23.16b //CTR block 8k+18 .long 0xce016929 //eor3 v9.16b, v9.16b, v1.16b, v26.16b //AES block 8k+9 - result mov v1.16b, v22.16b //CTR block 8k+17 stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .long 0xce0669ce //eor3 v14.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result mov v0.16b, v20.16b //CTR block 8k+16 rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 .long 0xce0569ad //eor3 v13.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low .long 0xce03696b //eor3 v11.16b, v11.16b, v3.16b, v26.16b //AES block 8k+11 - result mov v3.16b, v25.16b //CTR block 8k+19 stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result stp q12, q13, [x2], #32 //AES block 8k+12, 8k+13 - store result cmp x0, x5 //LOOP CONTROL stp q14, q15, [x2], #32 //AES block 8k+14, 8k+15 - store result b.lt L192_enc_main_loop L192_enc_prepretail: //PREPRETAIL rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] 
//load h8k | h7k rev64 v11.16b, v11.16b //GHASH block 8k+3 rev64 v10.16b, v10.16b //GHASH block 8k+2 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v9.16b, v9.16b //GHASH block 8k+1 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 rev64 v13.16b, v13.16b //GHASH block 8k+5 (t0, t1, t2 and t3 free) rev64 v14.16b, v14.16b //GHASH block 8k+6 (t0, t1, and t2 free) aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v18.16b, 
v18.16b, v24.16b //GHASH block 8k+1 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 rev64 v12.16b, v12.16b //GHASH block 8k+4 (t0, t1, and t2 free) aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 (t0, t1, t2 and t3 free) ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 
8k+4, 8k+5 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 ldr d16, [x10] //MODULO - load modulo constant aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v29.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ldr q26, [x11, #192] //load rk12 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v7.16b, v28.16b //AES block 8k+15 - round 11 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese 
v3.16b, v28.16b //AES block 8k+11 - round 11
aese v5.16b, v27.16b
aesmc v5.16b, v5.16b //AES block 8k+13 - round 10
aese v6.16b, v27.16b
aesmc v6.16b, v6.16b //AES block 8k+14 - round 10
add v30.4s, v30.4s, v31.4s //CTR block 8k+15
aese v2.16b, v28.16b //AES block 8k+10 - round 11
aese v0.16b, v28.16b //AES block 8k+8 - round 11
aese v6.16b, v28.16b //AES block 8k+14 - round 11
aese v4.16b, v28.16b //AES block 8k+12 - round 11
aese v5.16b, v28.16b //AES block 8k+13 - round 11
L192_enc_tail: //TAIL
ldp q20, q21, [x6, #96] //load h5l | h5h
sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process
ldr q8, [x0], #16 //AES block 8k+8 - load plaintext
ldp q24, q25, [x6, #160] //load h8k | h7k
mov v29.16b, v26.16b
ldp q22, q23, [x6, #128] //load h6l | h6h
cmp x5, #112
.long 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result
ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag
b.gt L192_enc_blocks_more_than_7
cmp x5, #96
mov v7.16b, v6.16b
movi v17.8b, #0
mov v6.16b, v5.16b
movi v19.8b, #0
sub v30.4s, v30.4s, v31.4s
mov v5.16b, v4.16b
mov v4.16b, v3.16b
mov v3.16b, v2.16b
mov v2.16b, v1.16b
movi v18.8b, #0
b.gt L192_enc_blocks_more_than_6
mov v7.16b, v6.16b
cmp x5, #80
mov v6.16b, v5.16b
mov v5.16b, v4.16b
mov v4.16b, v3.16b
mov v3.16b, v1.16b
sub v30.4s, v30.4s, v31.4s
b.gt L192_enc_blocks_more_than_5
cmp x5, #64
sub v30.4s, v30.4s, v31.4s
mov v7.16b, v6.16b
mov v6.16b, v5.16b
mov v5.16b, v4.16b
mov v4.16b, v1.16b
b.gt L192_enc_blocks_more_than_4
mov v7.16b, v6.16b
mov v6.16b, v5.16b
mov v5.16b, v1.16b
sub v30.4s, v30.4s, v31.4s
cmp x5, #48
b.gt L192_enc_blocks_more_than_3
mov v7.16b, v6.16b
mov v6.16b, v1.16b
sub v30.4s, v30.4s, v31.4s
ldr q24, [x6, #64] //load h4k | h3k
cmp x5, #32
b.gt L192_enc_blocks_more_than_2
sub v30.4s, v30.4s, v31.4s
cmp x5, #16
mov v7.16b, v1.16b
b.gt L192_enc_blocks_more_than_1
sub v30.4s, v30.4s, v31.4s
ldr q21, [x6, #16] //load h2k | h1k
b L192_enc_blocks_less_than_1
L192_enc_blocks_more_than_7: //blocks left > 7
st1 { v9.16b}, [x2], #16 //AES final-7 block - store result
rev64 v8.16b, v9.16b //GHASH final-7 block
ins v18.d[0], v24.d[1] //GHASH final-7 block - mid
eor v8.16b, v8.16b, v16.16b //feed in partial tag
ins v27.d[0], v8.d[1] //GHASH final-7 block - mid
ldr q9, [x0], #16 //AES final-6 block - load plaintext
eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid
movi v16.8b, #0 //suppress further partial tag feed in
pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low
pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high
pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid
.long 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result
L192_enc_blocks_more_than_6: //blocks left > 6
st1 { v9.16b}, [x2], #16 //AES final-6 block - store result
rev64 v8.16b, v9.16b //GHASH final-6 block
ldr q9, [x0], #16 //AES final-5 block - load plaintext
eor v8.16b, v8.16b, v16.16b //feed in partial tag
ins v27.d[0], v8.d[1] //GHASH final-6 block - mid
pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low
.long 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result
movi v16.8b, #0 //suppress further partial tag feed in
pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high
eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid
pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid
eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high
eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low
eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid
L192_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid .long 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid L192_enc_blocks_more_than_4: //blocks left > 4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-3 block - load plaintext pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid .long 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result L192_enc_blocks_more_than_3: //blocks left > 3 ldr q24, [x6, #64] //load h4k | h3k st1 { v9.16b}, [x2], #16 //AES final-3 block - store result rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-2 block - load plaintext ldr q25, [x6, #80] //load h4l | h4h ins v27.d[0], v8.d[1] //GHASH final-3 block - mid .long 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high L192_enc_blocks_more_than_2: //blocks left > 2 st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-1 block - load plaintext ins v27.d[0], v8.d[1] //GHASH final-2 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid .long 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, 
v29.16b //AES final-1 block - result
L192_enc_blocks_more_than_1: //blocks left > 1
ldr q22, [x6, #32] //load h1l | h1h
st1 { v9.16b}, [x2], #16 //AES final-1 block - store result
rev64 v8.16b, v9.16b //GHASH final-1 block
eor v8.16b, v8.16b, v16.16b //feed in partial tag
ins v27.d[0], v8.d[1] //GHASH final-1 block - mid
pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low
eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low
pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high
eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid
ldr q9, [x0], #16 //AES final block - load plaintext
ldr q21, [x6, #16] //load h2k | h1k
ins v27.d[1], v27.d[0] //GHASH final-1 block - mid
.long 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result
pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid
movi v16.8b, #0 //suppress further partial tag feed in
eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid
eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high
L192_enc_blocks_less_than_1: //blocks left <= 1
mvn x7, xzr //temp0_x = 0xffffffffffffffff
and x1, x1, #127 //bit_length %= 128
sub x1, x1, #128 //bit_length -= 128
neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128])
and x1, x1, #127 //bit_length %= 128
lsr x7, x7, x1 //temp0_x is mask for top 64b of last block
cmp x1, #64
mvn x8, xzr //temp1_x = 0xffffffffffffffff
csel x13, x8, x7, lt
csel x14, x7, xzr, lt
mov v0.d[1], x14
ldr q20, [x6] //load h1l | h1h
ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored
mov v0.d[0], x13 //ctr0b is mask for last block
and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits
rev64 v8.16b, v9.16b //GHASH final block
bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing
st1 { v9.16b}, [x2] //store all 16B
eor v8.16b, v8.16b, v16.16b //feed in partial tag
ins v16.d[0], v8.d[1] //GHASH final block - mid
pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high
eor v17.16b, v17.16b, v28.16b //GHASH final block - high
pmull v26.1q, v8.1d, v20.1d //GHASH final block - low
eor v16.8b, v16.8b, v8.8b //GHASH final block - mid
pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid
eor v18.16b, v18.16b, v16.16b //GHASH final block - mid
ldr d16, [x10] //MODULO - load modulo constant
eor v19.16b, v19.16b, v26.16b //GHASH final block - low
ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment
rev32 v30.16b, v30.16b
str q30, [x16] //store the updated counter
.long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up
pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid
.long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid
pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low
ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment
.long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low
ext v19.16b, v19.16b, v19.16b, #8
rev64 v19.16b, v19.16b
st1 { v19.16b }, [x3]
mov x0, x9 //return sizes
ldp d10, d11, [sp, #16]
ldp d12, d13, [sp, #32]
ldp d14, d15, [sp, #48]
ldp d8, d9, [sp], #80
ret
L192_enc_ret:
mov w0, #0x0
ret
.globl _aesv8_gcm_8x_dec_192
.private_extern _aesv8_gcm_8x_dec_192
.align 4
_aesv8_gcm_8x_dec_192:
AARCH64_VALID_CALL_TARGET
cbz x1, L192_dec_ret
stp d8, d9, [sp, #-80]!
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 mov x5, x9 ld1 { v0.16b}, [x16] //CTR block 0 ld1 { v19.16b}, [x3] mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese 
v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 sub x5, x5, #1 //byte_len - 1 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 add x4, x0, x1, lsr #3 //end_input_ptr aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 add x5, x5, x0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v0.16b, v27.16b aesmc 
v0.16b, v0.16b //AES block 0 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 ldr q26, [x11, #192] //load rk12 aese v0.16b, v28.16b //AES block 0 - round 11 aese v1.16b, v28.16b //AES block 1 - round 11 aese v4.16b, v28.16b //AES block 4 - round 11 aese v6.16b, v28.16b //AES block 6 - round 11 aese v5.16b, v28.16b //AES block 5 - round 11 aese v7.16b, v28.16b //AES block 7 - round 11 aese v2.16b, v28.16b //AES block 2 - round 11 aese v3.16b, v28.16b //AES block 3 - round 11 b.ge L192_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load ciphertext .long 0xce016921 //eor3 v1.16b, v9.16b, v1.16b, v26.16b //AES block 1 - result .long 0xce006900 //eor3 v0.16b, v8.16b, v0.16b, v26.16b //AES block 0 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .long 0xce036963 //eor3 v3.16b, v11.16b, v3.16b, v26.16b //AES block 3 - result .long 0xce026942 //eor3 v2.16b, v10.16b, v2.16b, v26.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .long 0xce046984 //eor3 v4.16b, v12.16b, v4.16b, v26.16b //AES block 4 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .long 0xce0569a5 //eor3 v5.16b, v13.16b, v5.16b, v26.16b //AES block 5 - result stp q4, q5, [x2], #32 //AES block 4, 5 - store result cmp x0, x5 //check if we have <= 8 blocks .long 0xce0669c6 //eor3 v6.16b, v14.16b, v6.16b, v26.16b //AES block 6 - result .long 0xce0769e7 //eor3 v7.16b, v15.16b, v7.16b, v26.16b //AES block 7 - result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 stp q6, q7, [x2], #32 //AES block 6, 7 - store result b.ge L192_dec_prepretail //do prepretail L192_dec_main_loop: //main loop start rev64 v9.16b, v9.16b //GHASH block 8k+1 ldp q26, q27, [x11, #0] //load rk0, rk1 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v11.16b, v11.16b //GHASH block 8k+3 eor v8.16b, v8.16b, v19.16b //PRE 1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v13.16b, v13.16b //GHASH block 8k+5 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high ldp q28, 
q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+15 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v5.16b, 
v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v21.1q, 
v14.1d, v21.1d //GHASH block 8k+7 - mid ldr d16, [x10] //MODULO - load modulo constant pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high rev32 v20.16b, v30.16b //CTR block 8k+16 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext rev32 v22.16b, v30.16b //CTR block 8k+17 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 ldr q26, [x11, #192] //load rk12 ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v0.16b, v28.16b //AES block 8k+8 - round 11 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v3.16b, v28.16b //AES block 8k+11 - round 11 .long 0xce006900 //eor3 v0.16b, v8.16b, v0.16b, v26.16b //AES block 8k+8 - result rev32 v25.16b, v30.16b //CTR block 8k+19 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v4.16b, v28.16b //AES block 8k+12 - round 11 aese v2.16b, v28.16b //AES block 8k+10 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v7.16b, v28.16b //AES 
block 8k+15 - round 11 aese v5.16b, v28.16b //AES block 8k+13 - round 11 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low .long 0xce016921 //eor3 v1.16b, v9.16b, v1.16b, v26.16b //AES block 8k+9 - result stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result .long 0xce036963 //eor3 v3.16b, v11.16b, v3.16b, v26.16b //AES block 8k+11 - result .long 0xce026942 //eor3 v2.16b, v10.16b, v2.16b, v26.16b //AES block 8k+10 - result .long 0xce0769e7 //eor3 v7.16b, v15.16b, v7.16b, v26.16b //AES block 8k+15 - result stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result .long 0xce0569a5 //eor3 v5.16b, v13.16b, v5.16b, v26.16b //AES block 8k+13 - result .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low mov v3.16b, v25.16b //CTR block 8k+19 .long 0xce046984 //eor3 v4.16b, v12.16b, v4.16b, v26.16b //AES block 8k+12 - result stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result cmp x0, x5 //LOOP CONTROL .long 0xce0669c6 //eor3 v6.16b, v14.16b, v6.16b, v26.16b //AES block 8k+14 - result stp q6, q7, [x2], #32 //AES block 8k+14, 8k+15 - store result mov v0.16b, v20.16b //CTR block 8k+16 mov v1.16b, v22.16b //CTR block 8k+17 mov v2.16b, v23.16b //CTR block 8k+18 rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 b.lt L192_dec_main_loop L192_dec_prepretail: //PREPRETAIL ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev32 v7.16b, v30.16b //CTR block 8k+15 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 
8k+10 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldp q27, q28, [x11, #64] //load rk4, rk5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 rev64 v15.16b, v15.16b //GHASH block 8k+7 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid rev64 v12.16b, v12.16b //GHASH block 8k+4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 rev64 v14.16b, v14.16b //GHASH block 8k+6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v6.16b, v28.16b aesmc v6.16b, 
v6.16b //AES block 8k+14 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 ldr d16, [x10] //MODULO - load modulo constant .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 pmull v29.1q, v17.1d, v16.1d 
//MODULO - top 64b align with mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ldr q26, [x11, #192] //load rk12 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v0.16b, v28.16b //AES block 8k+8 - round 11 .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low aese v5.16b, v28.16b //AES block 8k+13 - round 11 aese v2.16b, v28.16b //AES block 8k+10 - round 11 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v6.16b, v28.16b //AES block 8k+14 - round 11 aese v4.16b, v28.16b //AES block 8k+12 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v3.16b, v28.16b //AES block 8k+11 - round 11 aese v1.16b, v28.16b //AES block 8k+9 - round 11 aese v7.16b, v28.16b //AES block 8k+15 - round 11 L192_dec_tail: //TAIL sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldp q20, q21, [x6, #96] //load h5l | h5h ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q24, q25, [x6, #160] //load h8k | h7k mov v29.16b, v26.16b ldp q22, q23, [x6, #128] //load h6l | h6h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag .long 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result cmp x5, #112 b.gt L192_dec_blocks_more_than_7 mov v7.16b, v6.16b movi v17.8b, #0 sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b cmp x5, #96 movi v19.8b, #0 mov v3.16b, v2.16b mov v2.16b, v1.16b movi v18.8b, #0 b.gt L192_dec_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b sub v30.4s, v30.4s, v31.4s cmp x5, #80 b.gt L192_dec_blocks_more_than_5 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b cmp x5, #64 sub v30.4s, v30.4s, v31.4s b.gt L192_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b cmp x5, #48 b.gt L192_dec_blocks_more_than_3 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 mov v6.16b, v1.16b ldr q24, [x6, #64] //load h4k | h3k b.gt L192_dec_blocks_more_than_2 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b cmp x5, #16 b.gt 
L192_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b L192_dec_blocks_less_than_1 L192_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v27.d[0], v8.d[1] //GHASH final-7 block - mid ldr q9, [x0], #16 //AES final-6 block - load ciphertext pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid st1 { v12.16b}, [x2], #16 //AES final-7 block - store result .long 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in L192_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-5 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-6 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high st1 { v12.16b}, [x2], #16 //AES final-6 block - store result .long 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low L192_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high ldr q9, [x0], #16 //AES final-4 block - load ciphertext eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low movi v16.8b, #0 //supress further partial tag feed in st1 { v12.16b}, [x2], #16 //AES final-5 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid .long 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result L192_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q9, [x0], #16 //AES final-3 block - load ciphertext ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid st1 { v12.16b}, [x2], #16 //AES final-4 block - store result pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high .long 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high L192_dec_blocks_more_than_3: //blocks left > 3 ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH 
final-3 block ldr q9, [x0], #16 //AES final-2 block - load ciphertext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low st1 { v12.16b}, [x2], #16 //AES final-3 block - store result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid .long 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low ldr q24, [x6, #64] //load h4k | h3k ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid L192_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid ldr q9, [x0], #16 //AES final-1 block - load ciphertext pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low st1 { v12.16b}, [x2], #16 //AES final-2 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid .long 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result L192_dec_blocks_more_than_1: //blocks left > 1 rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load ciphertext ldr q22, [x6, #32] //load h1l | h1h eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ldr q21, [x6, #16] //load h2k | h1k pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low ins v27.d[0], v8.d[1] //GHASH final-1 block - mid st1 { v12.16b}, [x2], #16 //AES final-1 block - store result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high .long 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high L192_dec_blocks_less_than_1: //blocks left <= 1 rev32 v30.16b, v30.16b and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 str q30, [x16] //store the updated counter neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 mvn x8, xzr //temp1_x = 0xffffffffffffffff lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 csel x13, x8, x7, lt csel x14, x7, xzr, lt ldr q20, [x6] //load h1l | h1h mov v0.d[1], x14 ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mov v0.d[0], x13 //ctr0b is mask for last block and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of result 
before storing rev64 v8.16b, v9.16b //GHASH final block st1 { v12.16b}, [x2] //store all 16B eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v19.16b, v19.16b, v26.16b //GHASH final block - low pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v14.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid ext v17.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment eor v18.16b, v18.16b, v14.16b //MODULO - karatsuba tidy up .long 0xce115652 //eor3 v18.16b, v18.16b, v17.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .long 0xce124673 //eor3 v19.16b, v19.16b, v18.16b, v17.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret L192_dec_ret: mov w0, #0x0 ret .globl _aesv8_gcm_8x_enc_256 .private_extern _aesv8_gcm_8x_enc_256 .align 4 _aesv8_gcm_8x_enc_256: AARCH64_VALID_CALL_TARGET cbz x1, L256_enc_ret stp d8, d9, [sp, #-80]! lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 ld1 { v0.16b}, [x16] //CTR block 0 mov x5, x9 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 sub x5, x5, #1 //byte_len - 1 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) add x5, x5, x0 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - 
round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 
- round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 11 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 11 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 7 ldr q28, [x11, #224] //load rk14 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 12 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 12 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 12 aese v2.16b, v27.16b //AES block 2 - round 13 aese v1.16b, v27.16b //AES block 1 - round 13 aese v4.16b, v27.16b //AES block 4 - round 13 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 12 aese v0.16b, v27.16b //AES block 0 - round 13 aese v5.16b, v27.16b //AES block 5 - round 13 aese v6.16b, v27.16b //AES block 6 - round 13 aese v7.16b, v27.16b //AES block 7 - round 13 aese v3.16b, v27.16b //AES block 3 - round 13 add x4, x0, x1, lsr #3 //end_input_ptr cmp x0, x5 //check if we have <= 8 blocks b.ge L256_enc_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load plaintext ldp q10, q11, [x0], #32 //AES block 2, 3 - load plaintext .long 0xce007108 //eor3 v8.16b, v8.16b, v0.16b, v28.16b //AES block 0 - result rev32 v0.16b, v30.16b //CTR 
block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .long 0xce017129 //eor3 v9.16b, v9.16b, v1.16b, v28.16b //AES block 1 - result .long 0xce03716b //eor3 v11.16b, v11.16b, v3.16b, v28.16b //AES block 3 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext .long 0xce02714a //eor3 v10.16b, v10.16b, v2.16b, v28.16b //AES block 2 - result cmp x0, x5 //check if we have <= 8 blocks rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 stp q8, q9, [x2], #32 //AES block 0, 1 - store result stp q10, q11, [x2], #32 //AES block 2, 3 - store result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 .long 0xce04718c //eor3 v12.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result .long 0xce0771ef //eor3 v15.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result .long 0xce0671ce //eor3 v14.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result .long 0xce0571ad //eor3 v13.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result stp q12, q13, [x2], #32 //AES block 4, 5 - store result rev32 v4.16b, v30.16b //CTR block 12 stp q14, q15, [x2], #32 //AES block 6, 7 - store result add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge L256_enc_prepretail //do prepretail L256_enc_main_loop: //main loop start ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev64 v11.16b, v11.16b //GHASH block 8k+3 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k rev64 v12.16b, v12.16b //GHASH block 8k+4 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 rev32 v7.16b, v30.16b //CTR block 8k+15 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 eor v8.16b, v8.16b, v19.16b //PRE 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v1.16b, v28.16b aesmc 
v1.16b, v1.16b //AES block 8k+9 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 rev64 v14.16b, v14.16b //GHASH block 8k+6 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 ldp q27, q28, [x11, #64] //load rk4, rk5 rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid ldp q26, q27, [x11, #96] //load rk6, rk7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc 
v2.16b, v2.16b //AES block 8k+10 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 ldp q27, q28, [x11, #160] //load rk10, rk11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low 
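// Note: each pair of GHASH blocks in this loop is multiplied Karatsuba-style.
// pmull2/pmull against the hash powers fetched from [x6] (h1..h8 and the
// precomputed hNk values) give the high and low halves, while trn1/trn2 plus
// eor build the combined (hi ^ lo) operand for the middle term.  The .long
// words annotated "eor3" are hand-encoded EOR3 instructions from the SHA-3
// extension, used to XOR three accumulators in one step; they are emitted as
// raw opcodes, presumably so the file still assembles with toolchains that do
// not know the SHA-3 instructions.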
pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low ldr d16, [x10] //MODULO - load modulo constant pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high ldp q26, q27, [x11, #192] //load rk12, rk13 rev32 v20.16b, v30.16b //CTR block 8k+16 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load plaintext aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 rev32 v22.16b, v30.16b //CTR block 8k+17 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 ldr q28, [x11, #224] //load rk14 aese v7.16b, v27.16b //AES block 8k+15 - round 13 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load plaintext aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 ldp q12, q13, [x0], #32 //AES block 4, 5 - load plaintext ldp q14, q15, [x0], #32 //AES block 6, 7 - load plaintext aese v2.16b, v27.16b //AES block 8k+10 - round 13 aese v4.16b, v27.16b //AES block 8k+12 - round 13 rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 aese v5.16b, v27.16b //AES block 8k+13 - round 13 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 aese v3.16b, v27.16b //AES block 8k+11 - round 13 cmp x0, x5 //LOOP CONTROL .long 0xce02714a 
//eor3 v10.16b, v10.16b, v2.16b, v28.16b //AES block 8k+10 - result rev32 v25.16b, v30.16b //CTR block 8k+19 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 aese v0.16b, v27.16b //AES block 8k+8 - round 13 aese v6.16b, v27.16b //AES block 8k+14 - round 13 .long 0xce0571ad //eor3 v13.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v1.16b, v27.16b //AES block 8k+9 - round 13 .long 0xce04718c //eor3 v12.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result rev32 v4.16b, v30.16b //CTR block 8k+20 .long 0xce03716b //eor3 v11.16b, v11.16b, v3.16b, v28.16b //AES block 8k+11 - result mov v3.16b, v25.16b //CTR block 8k+19 .long 0xce017129 //eor3 v9.16b, v9.16b, v1.16b, v28.16b //AES block 8k+9 - result .long 0xce007108 //eor3 v8.16b, v8.16b, v0.16b, v28.16b //AES block 8k+8 - result add v30.4s, v30.4s, v31.4s //CTR block 8k+20 stp q8, q9, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v2.16b, v23.16b //CTR block 8k+18 .long 0xce0771ef //eor3 v15.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result .long 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low stp q10, q11, [x2], #32 //AES block 8k+10, 8k+11 - store result .long 0xce0671ce //eor3 v14.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result mov v1.16b, v22.16b //CTR block 8k+17 stp q12, q13, [x2], #32 //AES block 4, 5 - store result stp q14, q15, [x2], #32 //AES block 6, 7 - store result mov v0.16b, v20.16b //CTR block 8k+16 b.lt L256_enc_main_loop L256_enc_prepretail: //PREPRETAIL rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v10.16b, v10.16b //GHASH block 8k+2 rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v13.16b, v13.16b //GHASH block 8k+5 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v7.16b, v30.16b //CTR block 8k+15 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v8.16b, v8.16b //GHASH block 8k aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 eor v8.16b, v8.16b, v19.16b //PRE 1 rev64 v11.16b, v11.16b //GHASH block 8k+3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v2.16b, v28.16b aesmc 
v2.16b, v2.16b //AES block 8k+10 - round 2 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high rev64 v14.16b, v14.16b //GHASH block 8k+6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid rev64 v12.16b, v12.16b //GHASH block 8k+4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low eor v18.16b, v18.16b, v24.16b //GHASH 
block 8k+1 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 rev64 v15.16b, v15.16b //GHASH block 8k+7 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h ldp q28, q26, [x11, #128] //load rk8, rk9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high ldp q27, q28, [x11, #160] //load rk10, rk11 aese 
v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid ldr d16, [x10] //MODULO - load modulo constant .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v0.16b, v27.16b //AES block 8k+8 - round 13 .long 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low aese v5.16b, v27.16b //AES block 8k+13 - round 13 aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v3.16b, v27.16b //AES block 8k+11 - round 13 aese v4.16b, v27.16b //AES block 8k+12 - round 13 aese v7.16b, v27.16b //AES block 8k+15 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 
aese v6.16b, v27.16b //AES block 8k+14 - round 13 L256_enc_tail: //TAIL ldp q24, q25, [x6, #160] //load h8l | h8h sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process ldr q8, [x0], #16 //AES block 8k+8 - load plaintext ldp q20, q21, [x6, #96] //load h5l | h5h ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag ldp q22, q23, [x6, #128] //load h6l | h6h mov v29.16b, v28.16b cmp x5, #112 .long 0xce007509 //eor3 v9.16b, v8.16b, v0.16b, v29.16b //AES block 8k+8 - result b.gt L256_enc_blocks_more_than_7 movi v19.8b, #0 mov v7.16b, v6.16b movi v17.8b, #0 mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v2.16b sub v30.4s, v30.4s, v31.4s mov v2.16b, v1.16b movi v18.8b, #0 cmp x5, #96 b.gt L256_enc_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b cmp x5, #80 mov v5.16b, v4.16b mov v4.16b, v3.16b mov v3.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt L256_enc_blocks_more_than_5 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b cmp x5, #64 mov v4.16b, v1.16b b.gt L256_enc_blocks_more_than_4 cmp x5, #48 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt L256_enc_blocks_more_than_3 cmp x5, #32 mov v7.16b, v6.16b ldr q24, [x6, #64] //load h4k | h3k mov v6.16b, v1.16b sub v30.4s, v30.4s, v31.4s b.gt L256_enc_blocks_more_than_2 mov v7.16b, v1.16b sub v30.4s, v30.4s, v31.4s cmp x5, #16 b.gt L256_enc_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b L256_enc_blocks_less_than_1 L256_enc_blocks_more_than_7: //blocks left > 7 st1 { v9.16b}, [x2], #16 //AES final-7 block - store result rev64 v8.16b, v9.16b //GHASH final-7 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-6 block - load plaintext pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high ins v27.d[0], v8.d[1] //GHASH final-7 block - mid ins v18.d[0], v24.d[1] //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid .long 0xce017529 //eor3 v9.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low L256_enc_blocks_more_than_6: //blocks left > 6 st1 { v9.16b}, [x2], #16 //AES final-6 block - store result rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low ins v27.d[0], v8.d[1] //GHASH final-6 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high ldr q9, [x0], #16 //AES final-5 block - load plaintext eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid .long 0xce027529 //eor3 v9.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high L256_enc_blocks_more_than_5: //blocks left > 5 st1 { v9.16b}, [x2], #16 //AES final-5 block - store result rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-5 block - mid pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid ins v27.d[1], v27.d[0] //GHASH 
final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load plaintext pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid .long 0xce037529 //eor3 v9.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result L256_enc_blocks_more_than_4: //blocks left > 4 st1 { v9.16b}, [x2], #16 //AES final-4 block - store result rev64 v8.16b, v9.16b //GHASH final-4 block ldr q9, [x0], #16 //AES final-3 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high .long 0xce047529 //eor3 v9.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high L256_enc_blocks_more_than_3: //blocks left > 3 st1 { v9.16b}, [x2], #16 //AES final-3 block - store result ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-3 block - mid pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ldr q24, [x6, #64] //load h4k | h3k ins v27.d[1], v27.d[0] //GHASH final-3 block - mid ldr q9, [x0], #16 //AES final-2 block - load plaintext pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low .long 0xce057529 //eor3 v9.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result movi v16.8b, #0 //supress further partial tag feed in eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low L256_enc_blocks_more_than_2: //blocks left > 2 ldr q23, [x6, #48] //load h3l | h3h st1 { v9.16b}, [x2], #16 //AES final-2 block - store result rev64 v8.16b, v9.16b //GHASH final-2 block ldr q9, [x0], #16 //AES final-1 block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high .long 0xce067529 //eor3 v9.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low L256_enc_blocks_more_than_1: //blocks left > 1 st1 { v9.16b}, [x2], #16 //AES final-1 block - store result ldr q22, [x6, #32] //load h2l | h2h rev64 v8.16b, v9.16b //GHASH final-1 block ldr q9, [x0], #16 //AES final block - load plaintext eor v8.16b, v8.16b, v16.16b //feed in partial tag movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-1 block - mid pmull2 v28.1q, v8.2d, 
v22.2d //GHASH final-1 block - high .long 0xce077529 //eor3 v9.16b, v9.16b, v7.16b, v29.16b //AES final block - result eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q21, [x6, #16] //load h2k | h1k eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low ins v27.d[1], v27.d[0] //GHASH final-1 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid L256_enc_blocks_less_than_1: //blocks left <= 1 and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x14, x7, xzr, lt csel x13, x8, x7, lt mov v0.d[0], x13 //ctr0b is mask for last block ldr q20, [x6] //load h1l | h1h ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mov v0.d[1], x14 and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits rev64 v8.16b, v9.16b //GHASH final block rev32 v30.16b, v30.16b bif v9.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing str q30, [x16] //store the updated counter eor v8.16b, v8.16b, v16.16b //feed in partial tag st1 { v9.16b}, [x2] //store all 16B ins v16.d[0], v8.d[1] //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high eor v19.16b, v19.16b, v26.16b //GHASH final block - low eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment .long 0xce115673 //eor3 v19.16b, v19.16b, v17.16b, v21.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 //return sizes ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret L256_enc_ret: mov w0, #0x0 ret .globl _aesv8_gcm_8x_dec_256 .private_extern _aesv8_gcm_8x_dec_256 .align 4 _aesv8_gcm_8x_dec_256: AARCH64_VALID_CALL_TARGET cbz x1, L256_dec_ret stp d8, d9, [sp, #-80]! 
lsr x9, x1, #3 mov x16, x4 mov x11, x5 stp d10, d11, [sp, #16] stp d12, d13, [sp, #32] stp d14, d15, [sp, #48] mov x5, #0xc200000000000000 stp x5, xzr, [sp, #64] add x10, sp, #64 ld1 { v0.16b}, [x16] //CTR block 0 mov x15, #0x100000000 //set up counter increment movi v31.16b, #0x0 mov v31.d[1], x15 mov x5, x9 sub x5, x5, #1 //byte_len - 1 rev32 v30.16b, v0.16b //set up reversed counter add v30.4s, v30.4s, v31.4s //CTR block 0 rev32 v1.16b, v30.16b //CTR block 1 add v30.4s, v30.4s, v31.4s //CTR block 1 rev32 v2.16b, v30.16b //CTR block 2 add v30.4s, v30.4s, v31.4s //CTR block 2 ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v3.16b, v30.16b //CTR block 3 add v30.4s, v30.4s, v31.4s //CTR block 3 rev32 v4.16b, v30.16b //CTR block 4 add v30.4s, v30.4s, v31.4s //CTR block 4 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 0 rev32 v5.16b, v30.16b //CTR block 5 add v30.4s, v30.4s, v31.4s //CTR block 5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 0 rev32 v6.16b, v30.16b //CTR block 6 add v30.4s, v30.4s, v31.4s //CTR block 6 rev32 v7.16b, v30.16b //CTR block 7 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 1 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 2 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 2 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 3 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 3 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 3 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 3 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 3 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 4 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - 
round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 4 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 4 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 5 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 6 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 7 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 7 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 7 and x5, x5, #0xffffffffffffff80 //number of bytes to be processed in main loop (at least 1 byte must be handled by tail) aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 8 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 8 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 9 ld1 { v19.16b}, [x3] ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b ldp q27, q28, [x11, #160] //load rk10, rk11 add x4, x0, x1, lsr #3 //end_input_ptr add x5, x5, x0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 9 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 9 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 9 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 4 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 7 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 5 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 1 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 2 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 0 - 
round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 6 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 3 - round 10 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 0 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 7 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 7 - round 11 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 3 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 1 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 5 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 4 - round 11 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 2 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 6 - round 11 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 1 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 4 - round 12 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 5 - round 12 cmp x0, x5 //check if we have <= 8 blocks aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 3 - round 12 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 2 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 6 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 0 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 7 - round 12 aese v5.16b, v27.16b //AES block 5 - round 13 aese v1.16b, v27.16b //AES block 1 - round 13 aese v2.16b, v27.16b //AES block 2 - round 13 aese v0.16b, v27.16b //AES block 0 - round 13 aese v4.16b, v27.16b //AES block 4 - round 13 aese v6.16b, v27.16b //AES block 6 - round 13 aese v3.16b, v27.16b //AES block 3 - round 13 aese v7.16b, v27.16b //AES block 7 - round 13 b.ge L256_dec_tail //handle tail ldp q8, q9, [x0], #32 //AES block 0, 1 - load ciphertext ldp q10, q11, [x0], #32 //AES block 2, 3 - load ciphertext ldp q12, q13, [x0], #32 //AES block 4, 5 - load ciphertext ldp q14, q15, [x0], #32 //AES block 6, 7 - load ciphertext cmp x0, x5 //check if we have <= 8 blocks .long 0xce017121 //eor3 v1.16b, v9.16b, v1.16b, v28.16b //AES block 1 - result .long 0xce007100 //eor3 v0.16b, v8.16b, v0.16b, v28.16b //AES block 0 - result stp q0, q1, [x2], #32 //AES block 0, 1 - store result rev32 v0.16b, v30.16b //CTR block 8 add v30.4s, v30.4s, v31.4s //CTR block 8 .long 0xce037163 //eor3 v3.16b, v11.16b, v3.16b, v28.16b //AES block 3 - result .long 0xce0571a5 //eor3 v5.16b, v13.16b, v5.16b, v28.16b //AES block 5 - result .long 0xce047184 //eor3 v4.16b, v12.16b, v4.16b, v28.16b //AES block 4 - result rev32 v1.16b, v30.16b //CTR block 9 add v30.4s, v30.4s, v31.4s //CTR block 9 .long 0xce027142 //eor3 v2.16b, v10.16b, v2.16b, v28.16b //AES block 2 - result stp q2, q3, [x2], #32 //AES block 2, 3 - store result rev32 v2.16b, v30.16b //CTR block 10 add v30.4s, v30.4s, v31.4s //CTR block 10 .long 0xce0671c6 //eor3 v6.16b, v14.16b, v6.16b, v28.16b //AES block 6 - result rev32 v3.16b, v30.16b //CTR block 11 add v30.4s, v30.4s, v31.4s //CTR block 11 stp q4, q5, [x2], #32 //AES block 4, 5 - store result .long 0xce0771e7 //eor3 v7.16b, v15.16b, v7.16b, v28.16b //AES block 7 - result stp q6, q7, [x2], #32 //AES block 6, 7 - store result rev32 v4.16b, v30.16b //CTR block 12 add v30.4s, v30.4s, v31.4s //CTR block 12 b.ge L256_dec_prepretail //do prepretail L256_dec_main_loop: //main loop start rev32 v5.16b, v30.16b //CTR block 8k+13 ldp q26, q27, [x11, #0] //load rk0, rk1 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v9.16b, v9.16b //GHASH block 8k+1 ldr q23, [x6, 
#144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev32 v6.16b, v30.16b //CTR block 8k+14 add v30.4s, v30.4s, v31.4s //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 rev64 v12.16b, v12.16b //GHASH block 8k+4 rev64 v11.16b, v11.16b //GHASH block 8k+3 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v15.16b, v15.16b //GHASH block 8k+7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 eor v8.16b, v8.16b, v19.16b //PRE 1 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 rev64 v10.16b, v10.16b //GHASH block 8k+2 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 
aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k eor v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low ldp q26, q27, [x11, #96] //load rk6, rk7 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid rev64 v13.16b, v13.16b //GHASH block 8k+5 pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h rev64 v14.16b, v14.16b //GHASH block 8k+6 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 ldp q28, q26, [x11, #128] //load rk8, rk9 ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull v25.1q, v12.1d, v25.1d 
//GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 ldp q27, q28, [x11, #160] //load rk10, rk11 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid add v30.4s, v30.4s, v31.4s //CTR block 8k+15 .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 ldp q8, q9, [x0], #32 //AES block 8k+8, 8k+9 - load ciphertext eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low rev32 v20.16b, v30.16b //CTR block 8k+16 ldr d16, [x10] //MODULO - load modulo constant add v30.4s, v30.4s, v31.4s //CTR block 8k+16 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 ldp q26, q27, [x11, #192] //load rk12, rk13 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 .long 
0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid rev32 v22.16b, v30.16b //CTR block 8k+17 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 ldp q10, q11, [x0], #32 //AES block 8k+10, 8k+11 - load ciphertext aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 add v30.4s, v30.4s, v31.4s //CTR block 8k+17 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 rev32 v23.16b, v30.16b //CTR block 8k+18 add v30.4s, v30.4s, v31.4s //CTR block 8k+18 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 ldr q28, [x11, #224] //load rk14 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 ldp q12, q13, [x0], #32 //AES block 8k+12, 8k+13 - load ciphertext aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 ldp q14, q15, [x0], #32 //AES block 8k+14, 8k+15 - load ciphertext aese v0.16b, v27.16b //AES block 8k+8 - round 13 aese v5.16b, v27.16b //AES block 8k+13 - round 13 rev32 v25.16b, v30.16b //CTR block 8k+19 .long 0xce027142 //eor3 v2.16b, v10.16b, v2.16b, v28.16b //AES block 8k+10 - result .long 0xce017121 //eor3 v1.16b, v9.16b, v1.16b, v28.16b //AES block 8k+9 - result ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v7.16b, v27.16b //AES block 8k+15 - round 13 add v30.4s, v30.4s, v31.4s //CTR block 8k+19 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v27.16b //AES block 8k+12 - round 13 .long 0xce0571a5 //eor3 v5.16b, v13.16b, v5.16b, v28.16b //AES block 8k+13 - result .long 0xce007100 //eor3 v0.16b, v8.16b, v0.16b, v28.16b //AES block 8k+8 - result aese v3.16b, v27.16b //AES block 8k+11 - round 13 stp q0, q1, [x2], #32 //AES block 8k+8, 8k+9 - store result mov v0.16b, v20.16b //CTR block 8k+16 .long 0xce047184 //eor3 v4.16b, v12.16b, v4.16b, v28.16b //AES block 8k+12 - result .long 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low .long 0xce037163 //eor3 v3.16b, v11.16b, v3.16b, v28.16b //AES block 8k+11 - result stp q2, q3, [x2], #32 //AES block 8k+10, 8k+11 - store result mov v3.16b, v25.16b //CTR block 8k+19 mov v2.16b, v23.16b //CTR block 8k+18 aese v6.16b, v27.16b //AES block 8k+14 - round 13 mov v1.16b, v22.16b //CTR block 8k+17 stp q4, q5, [x2], #32 //AES block 8k+12, 8k+13 - store result .long 0xce0771e7 //eor3 v7.16b, v15.16b, v7.16b, v28.16b //AES block 8k+15 - result .long 0xce0671c6 //eor3 v6.16b, v14.16b, v6.16b, v28.16b //AES block 8k+14 - result rev32 v4.16b, v30.16b //CTR block 8k+20 add v30.4s, v30.4s, v31.4s //CTR block 8k+20 cmp x0, x5 //LOOP CONTROL stp q6, q7, [x2], #32 //AES 
block 8k+14, 8k+15 - store result b.lt L256_dec_main_loop L256_dec_prepretail: //PREPRETAIL ldp q26, q27, [x11, #0] //load rk0, rk1 rev32 v5.16b, v30.16b //CTR block 8k+13 add v30.4s, v30.4s, v31.4s //CTR block 8k+13 rev64 v12.16b, v12.16b //GHASH block 8k+4 ldr q21, [x6, #112] //load h6k | h5k ldr q24, [x6, #160] //load h8k | h7k rev32 v6.16b, v30.16b //CTR block 8k+14 rev64 v8.16b, v8.16b //GHASH block 8k add v30.4s, v30.4s, v31.4s //CTR block 8k+14 ext v19.16b, v19.16b, v19.16b, #8 //PRE 0 ldr q23, [x6, #144] //load h7l | h7h ldr q25, [x6, #176] //load h8l | h8h rev64 v9.16b, v9.16b //GHASH block 8k+1 rev32 v7.16b, v30.16b //CTR block 8k+15 rev64 v10.16b, v10.16b //GHASH block 8k+2 ldr q20, [x6, #96] //load h5l | h5h ldr q22, [x6, #128] //load h6l | h6h aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 0 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 0 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 0 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 0 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 0 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 0 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 1 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 0 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 0 ldp q28, q26, [x11, #32] //load rk2, rk3 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 1 eor v8.16b, v8.16b, v19.16b //PRE 1 aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 1 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 1 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 1 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 1 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 1 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 1 pmull2 v16.1q, v9.2d, v23.2d //GHASH block 8k+1 - high trn1 v18.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid pmull v19.1q, v8.1d, v25.1d //GHASH block 8k - low rev64 v11.16b, v11.16b //GHASH block 8k+3 pmull v23.1q, v9.1d, v23.1d //GHASH block 8k+1 - low aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 2 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 2 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 2 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 2 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 2 pmull2 v17.1q, v8.2d, v25.2d //GHASH block 8k - high aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 2 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 3 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 3 rev64 v14.16b, v14.16b //GHASH block 8k+6 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 3 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 2 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 3 pmull2 v29.1q, v10.2d, v22.2d //GHASH block 8k+2 - high trn2 v8.2d, v9.2d, v8.2d //GHASH block 8k, 8k+1 - mid aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 2 ldp q27, q28, [x11, #64] //load rk4, rk5 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 3 pmull2 v9.1q, v11.2d, v20.2d //GHASH block 8k+3 - high aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 3 eor v17.16b, v17.16b, v16.16b //GHASH block 8k+1 - high eor 
v8.16b, v8.16b, v18.16b //GHASH block 8k, 8k+1 - mid aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 3 pmull v22.1q, v10.1d, v22.1d //GHASH block 8k+2 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 3 .long 0xce1d2631 //eor3 v17.16b, v17.16b, v29.16b, v9.16b //GHASH block 8k+2, 8k+3 - high trn1 v29.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid trn2 v10.2d, v11.2d, v10.2d //GHASH block 8k+2, 8k+3 - mid pmull2 v18.1q, v8.2d, v24.2d //GHASH block 8k - mid pmull v20.1q, v11.1d, v20.1d //GHASH block 8k+3 - low eor v19.16b, v19.16b, v23.16b //GHASH block 8k+1 - low pmull v24.1q, v8.1d, v24.1d //GHASH block 8k+1 - mid aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 4 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 4 .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+2, 8k+3 - low ldr q20, [x6] //load h1l | h1h ldr q22, [x6, #32] //load h2l | h2h aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 4 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 4 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 4 eor v18.16b, v18.16b, v24.16b //GHASH block 8k+1 - mid eor v10.16b, v10.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 5 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 4 aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 5 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 4 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 4 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 5 pmull2 v29.1q, v10.2d, v21.2d //GHASH block 8k+2 - mid aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 5 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 5 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 5 pmull v21.1q, v10.1d, v21.1d //GHASH block 8k+3 - mid aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 5 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 5 ldp q26, q27, [x11, #96] //load rk6, rk7 ldr q23, [x6, #48] //load h3l | h3h ldr q25, [x6, #80] //load h4l | h4h rev64 v15.16b, v15.16b //GHASH block 8k+7 rev64 v13.16b, v13.16b //GHASH block 8k+5 .long 0xce157652 //eor3 v18.16b, v18.16b, v21.16b, v29.16b //GHASH block 8k+2, 8k+3 - mid trn1 v16.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 6 ldr q21, [x6, #16] //load h2k | h1k ldr q24, [x6, #64] //load h4k | h3k aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 6 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 6 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 6 pmull2 v8.1q, v12.2d, v25.2d //GHASH block 8k+4 - high pmull2 v10.1q, v13.2d, v23.2d //GHASH block 8k+5 - high pmull v25.1q, v12.1d, v25.1d //GHASH block 8k+4 - low trn2 v12.2d, v13.2d, v12.2d //GHASH block 8k+4, 8k+5 - mid pmull v23.1q, v13.1d, v23.1d //GHASH block 8k+5 - low trn1 v13.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 7 pmull2 v11.1q, v14.2d, v22.2d //GHASH block 8k+6 - high aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 6 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 6 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 6 aese v4.16b, 
v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 6 ldp q28, q26, [x11, #128] //load rk8, rk9 pmull v22.1q, v14.1d, v22.1d //GHASH block 8k+6 - low aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 7 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 7 aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 7 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 7 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 7 .long 0xce082a31 //eor3 v17.16b, v17.16b, v8.16b, v10.16b //GHASH block 8k+4, 8k+5 - high aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 7 trn2 v14.2d, v15.2d, v14.2d //GHASH block 8k+6, 8k+7 - mid aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 7 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 8 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 8 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 8 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 8 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 8 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 8 aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 8 aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 9 eor v12.16b, v12.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 9 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 9 eor v14.16b, v14.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 9 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 9 pmull2 v16.1q, v12.2d, v24.2d //GHASH block 8k+4 - mid aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 8 pmull v24.1q, v12.1d, v24.1d //GHASH block 8k+5 - mid pmull2 v12.1q, v15.2d, v20.2d //GHASH block 8k+7 - high pmull2 v13.1q, v14.2d, v21.2d //GHASH block 8k+6 - mid pmull v21.1q, v14.1d, v21.1d //GHASH block 8k+7 - mid pmull v20.1q, v15.1d, v20.1d //GHASH block 8k+7 - low ldp q27, q28, [x11, #160] //load rk10, rk11 .long 0xce195e73 //eor3 v19.16b, v19.16b, v25.16b, v23.16b //GHASH block 8k+4, 8k+5 - low .long 0xce184252 //eor3 v18.16b, v18.16b, v24.16b, v16.16b //GHASH block 8k+4, 8k+5 - mid aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 9 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 9 aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 9 .long 0xce0b3231 //eor3 v17.16b, v17.16b, v11.16b, v12.16b //GHASH block 8k+6, 8k+7 - high .long 0xce165273 //eor3 v19.16b, v19.16b, v22.16b, v20.16b //GHASH block 8k+6, 8k+7 - low ldr d16, [x10] //MODULO - load modulo constant .long 0xce153652 //eor3 v18.16b, v18.16b, v21.16b, v13.16b //GHASH block 8k+6, 8k+7 - mid aese v4.16b, v27.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 10 aese v6.16b, v27.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 10 aese v5.16b, v27.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 10 aese v0.16b, v27.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 10 aese v2.16b, v27.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 10 aese v3.16b, v27.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 10 .long 0xce114e52 //eor3 v18.16b, v18.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up aese v7.16b, v27.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 10 aese v1.16b, v27.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 10 
ldp q26, q27, [x11, #192] //load rk12, rk13 ext v21.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment aese v2.16b, v28.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 11 aese v1.16b, v28.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 11 aese v0.16b, v28.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 11 pmull v29.1q, v17.1d, v16.1d //MODULO - top 64b align with mid aese v3.16b, v28.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 11 aese v7.16b, v28.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 11 aese v6.16b, v28.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 11 aese v4.16b, v28.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 11 aese v5.16b, v28.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 11 aese v3.16b, v26.16b aesmc v3.16b, v3.16b //AES block 8k+11 - round 12 .long 0xce1d5652 //eor3 v18.16b, v18.16b, v29.16b, v21.16b //MODULO - fold into mid aese v3.16b, v27.16b //AES block 8k+11 - round 13 aese v2.16b, v26.16b aesmc v2.16b, v2.16b //AES block 8k+10 - round 12 aese v6.16b, v26.16b aesmc v6.16b, v6.16b //AES block 8k+14 - round 12 pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low aese v4.16b, v26.16b aesmc v4.16b, v4.16b //AES block 8k+12 - round 12 aese v7.16b, v26.16b aesmc v7.16b, v7.16b //AES block 8k+15 - round 12 aese v0.16b, v26.16b aesmc v0.16b, v0.16b //AES block 8k+8 - round 12 ldr q28, [x11, #224] //load rk14 aese v1.16b, v26.16b aesmc v1.16b, v1.16b //AES block 8k+9 - round 12 aese v4.16b, v27.16b //AES block 8k+12 - round 13 ext v21.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment aese v5.16b, v26.16b aesmc v5.16b, v5.16b //AES block 8k+13 - round 12 aese v6.16b, v27.16b //AES block 8k+14 - round 13 aese v2.16b, v27.16b //AES block 8k+10 - round 13 aese v1.16b, v27.16b //AES block 8k+9 - round 13 aese v5.16b, v27.16b //AES block 8k+13 - round 13 .long 0xce154673 //eor3 v19.16b, v19.16b, v21.16b, v17.16b //MODULO - fold into low add v30.4s, v30.4s, v31.4s //CTR block 8k+15 aese v7.16b, v27.16b //AES block 8k+15 - round 13 aese v0.16b, v27.16b //AES block 8k+8 - round 13 L256_dec_tail: //TAIL ext v16.16b, v19.16b, v19.16b, #8 //prepare final partial tag sub x5, x4, x0 //main_end_input_ptr is number of bytes left to process cmp x5, #112 ldr q9, [x0], #16 //AES block 8k+8 - load ciphertext ldp q24, q25, [x6, #160] //load h8k | h7k mov v29.16b, v28.16b ldp q20, q21, [x6, #96] //load h5l | h5h .long 0xce00752c //eor3 v12.16b, v9.16b, v0.16b, v29.16b //AES block 8k+8 - result ldp q22, q23, [x6, #128] //load h6l | h6h b.gt L256_dec_blocks_more_than_7 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v3.16b movi v19.8b, #0 movi v17.8b, #0 movi v18.8b, #0 mov v3.16b, v2.16b cmp x5, #96 mov v2.16b, v1.16b b.gt L256_dec_blocks_more_than_6 mov v7.16b, v6.16b mov v6.16b, v5.16b mov v5.16b, v4.16b cmp x5, #80 sub v30.4s, v30.4s, v31.4s mov v4.16b, v3.16b mov v3.16b, v1.16b b.gt L256_dec_blocks_more_than_5 cmp x5, #64 mov v7.16b, v6.16b sub v30.4s, v30.4s, v31.4s mov v6.16b, v5.16b mov v5.16b, v4.16b mov v4.16b, v1.16b b.gt L256_dec_blocks_more_than_4 sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #48 mov v6.16b, v5.16b mov v5.16b, v1.16b b.gt L256_dec_blocks_more_than_3 ldr q24, [x6, #64] //load h4k | h3k sub v30.4s, v30.4s, v31.4s mov v7.16b, v6.16b cmp x5, #32 mov v6.16b, v1.16b b.gt L256_dec_blocks_more_than_2 sub v30.4s, v30.4s, v31.4s mov v7.16b, v1.16b cmp x5, #16 b.gt L256_dec_blocks_more_than_1 sub v30.4s, v30.4s, v31.4s ldr q21, [x6, #16] //load h2k | h1k b 
L256_dec_blocks_less_than_1 L256_dec_blocks_more_than_7: //blocks left > 7 rev64 v8.16b, v9.16b //GHASH final-7 block ldr q9, [x0], #16 //AES final-6 block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-7 block - store result ins v18.d[0], v24.d[1] //GHASH final-7 block - mid eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-7 block - mid .long 0xce01752c //eor3 v12.16b, v9.16b, v1.16b, v29.16b //AES final-6 block - result pmull2 v17.1q, v8.2d, v25.2d //GHASH final-7 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-7 block - mid movi v16.8b, #0 //supress further partial tag feed in pmull v19.1q, v8.1d, v25.1d //GHASH final-7 block - low pmull v18.1q, v27.1d, v18.1d //GHASH final-7 block - mid L256_dec_blocks_more_than_6: //blocks left > 6 rev64 v8.16b, v9.16b //GHASH final-6 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-5 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in ins v27.d[0], v8.d[1] //GHASH final-6 block - mid st1 { v12.16b}, [x2], #16 //AES final-6 block - store result pmull2 v28.1q, v8.2d, v23.2d //GHASH final-6 block - high pmull v26.1q, v8.1d, v23.1d //GHASH final-6 block - low .long 0xce02752c //eor3 v12.16b, v9.16b, v2.16b, v29.16b //AES final-5 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-6 block - low eor v27.8b, v27.8b, v8.8b //GHASH final-6 block - mid pmull v27.1q, v27.1d, v24.1d //GHASH final-6 block - mid eor v18.16b, v18.16b, v27.16b //GHASH final-6 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-6 block - high L256_dec_blocks_more_than_5: //blocks left > 5 rev64 v8.16b, v9.16b //GHASH final-5 block eor v8.16b, v8.16b, v16.16b //feed in partial tag pmull2 v28.1q, v8.2d, v22.2d //GHASH final-5 block - high ins v27.d[0], v8.d[1] //GHASH final-5 block - mid ldr q9, [x0], #16 //AES final-4 block - load ciphertext eor v27.8b, v27.8b, v8.8b //GHASH final-5 block - mid st1 { v12.16b}, [x2], #16 //AES final-5 block - store result pmull v26.1q, v8.1d, v22.1d //GHASH final-5 block - low ins v27.d[1], v27.d[0] //GHASH final-5 block - mid pmull2 v27.1q, v27.2d, v21.2d //GHASH final-5 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-5 block - high .long 0xce03752c //eor3 v12.16b, v9.16b, v3.16b, v29.16b //AES final-4 block - result eor v19.16b, v19.16b, v26.16b //GHASH final-5 block - low eor v18.16b, v18.16b, v27.16b //GHASH final-5 block - mid movi v16.8b, #0 //supress further partial tag feed in L256_dec_blocks_more_than_4: //blocks left > 4 rev64 v8.16b, v9.16b //GHASH final-4 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-4 block - mid ldr q9, [x0], #16 //AES final-3 block - load ciphertext movi v16.8b, #0 //supress further partial tag feed in pmull v26.1q, v8.1d, v20.1d //GHASH final-4 block - low pmull2 v28.1q, v8.2d, v20.2d //GHASH final-4 block - high eor v27.8b, v27.8b, v8.8b //GHASH final-4 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-4 block - high pmull v27.1q, v27.1d, v21.1d //GHASH final-4 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-4 block - low st1 { v12.16b}, [x2], #16 //AES final-4 block - store result eor v18.16b, v18.16b, v27.16b //GHASH final-4 block - mid .long 0xce04752c //eor3 v12.16b, v9.16b, v4.16b, v29.16b //AES final-3 block - result L256_dec_blocks_more_than_3: //blocks left > 3 ldr q25, [x6, #80] //load h4l | h4h rev64 v8.16b, v9.16b //GHASH final-3 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ldr q9, [x0], #16 //AES final-2 
block - load ciphertext ldr q24, [x6, #64] //load h4k | h3k ins v27.d[0], v8.d[1] //GHASH final-3 block - mid st1 { v12.16b}, [x2], #16 //AES final-3 block - store result .long 0xce05752c //eor3 v12.16b, v9.16b, v5.16b, v29.16b //AES final-2 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-3 block - mid ins v27.d[1], v27.d[0] //GHASH final-3 block - mid pmull v26.1q, v8.1d, v25.1d //GHASH final-3 block - low pmull2 v28.1q, v8.2d, v25.2d //GHASH final-3 block - high movi v16.8b, #0 //supress further partial tag feed in pmull2 v27.1q, v27.2d, v24.2d //GHASH final-3 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-3 block - low eor v17.16b, v17.16b, v28.16b //GHASH final-3 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-3 block - mid L256_dec_blocks_more_than_2: //blocks left > 2 rev64 v8.16b, v9.16b //GHASH final-2 block ldr q23, [x6, #48] //load h3l | h3h ldr q9, [x0], #16 //AES final-1 block - load ciphertext eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-2 block - mid pmull v26.1q, v8.1d, v23.1d //GHASH final-2 block - low st1 { v12.16b}, [x2], #16 //AES final-2 block - store result .long 0xce06752c //eor3 v12.16b, v9.16b, v6.16b, v29.16b //AES final-1 block - result eor v27.8b, v27.8b, v8.8b //GHASH final-2 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-2 block - low movi v16.8b, #0 //supress further partial tag feed in pmull v27.1q, v27.1d, v24.1d //GHASH final-2 block - mid pmull2 v28.1q, v8.2d, v23.2d //GHASH final-2 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-2 block - mid eor v17.16b, v17.16b, v28.16b //GHASH final-2 block - high L256_dec_blocks_more_than_1: //blocks left > 1 rev64 v8.16b, v9.16b //GHASH final-1 block eor v8.16b, v8.16b, v16.16b //feed in partial tag ins v27.d[0], v8.d[1] //GHASH final-1 block - mid ldr q22, [x6, #32] //load h2l | h2h eor v27.8b, v27.8b, v8.8b //GHASH final-1 block - mid ldr q9, [x0], #16 //AES final block - load ciphertext st1 { v12.16b}, [x2], #16 //AES final-1 block - store result ldr q21, [x6, #16] //load h2k | h1k pmull v26.1q, v8.1d, v22.1d //GHASH final-1 block - low ins v27.d[1], v27.d[0] //GHASH final-1 block - mid eor v19.16b, v19.16b, v26.16b //GHASH final-1 block - low .long 0xce07752c //eor3 v12.16b, v9.16b, v7.16b, v29.16b //AES final block - result pmull2 v28.1q, v8.2d, v22.2d //GHASH final-1 block - high pmull2 v27.1q, v27.2d, v21.2d //GHASH final-1 block - mid movi v16.8b, #0 //supress further partial tag feed in eor v17.16b, v17.16b, v28.16b //GHASH final-1 block - high eor v18.16b, v18.16b, v27.16b //GHASH final-1 block - mid L256_dec_blocks_less_than_1: //blocks left <= 1 ld1 { v26.16b}, [x2] //load existing bytes where the possibly partial last block is to be stored mvn x7, xzr //temp0_x = 0xffffffffffffffff and x1, x1, #127 //bit_length %= 128 sub x1, x1, #128 //bit_length -= 128 rev32 v30.16b, v30.16b str q30, [x16] //store the updated counter neg x1, x1 //bit_length = 128 - #bits in input (in range [1,128]) and x1, x1, #127 //bit_length %= 128 lsr x7, x7, x1 //temp0_x is mask for top 64b of last block cmp x1, #64 mvn x8, xzr //temp1_x = 0xffffffffffffffff csel x14, x7, xzr, lt csel x13, x8, x7, lt mov v0.d[0], x13 //ctr0b is mask for last block mov v0.d[1], x14 and v9.16b, v9.16b, v0.16b //possibly partial last block has zeroes in highest bits ldr q20, [x6] //load h1l | h1h bif v12.16b, v26.16b, v0.16b //insert existing bytes in top end of result before storing rev64 v8.16b, v9.16b //GHASH final block eor v8.16b, v8.16b, v16.16b //feed in 
partial tag ins v16.d[0], v8.d[1] //GHASH final block - mid pmull2 v28.1q, v8.2d, v20.2d //GHASH final block - high eor v16.8b, v16.8b, v8.8b //GHASH final block - mid pmull v26.1q, v8.1d, v20.1d //GHASH final block - low eor v17.16b, v17.16b, v28.16b //GHASH final block - high pmull v16.1q, v16.1d, v21.1d //GHASH final block - mid eor v18.16b, v18.16b, v16.16b //GHASH final block - mid ldr d16, [x10] //MODULO - load modulo constant eor v19.16b, v19.16b, v26.16b //GHASH final block - low pmull v21.1q, v17.1d, v16.1d //MODULO - top 64b align with mid eor v14.16b, v17.16b, v19.16b //MODULO - karatsuba tidy up ext v17.16b, v17.16b, v17.16b, #8 //MODULO - other top alignment st1 { v12.16b}, [x2] //store all 16B eor v18.16b, v18.16b, v14.16b //MODULO - karatsuba tidy up eor v21.16b, v17.16b, v21.16b //MODULO - fold into mid eor v18.16b, v18.16b, v21.16b //MODULO - fold into mid pmull v17.1q, v18.1d, v16.1d //MODULO - mid 64b align with low ext v18.16b, v18.16b, v18.16b, #8 //MODULO - other mid alignment eor v19.16b, v19.16b, v17.16b //MODULO - fold into low eor v19.16b, v19.16b, v18.16b //MODULO - fold into low ext v19.16b, v19.16b, v19.16b, #8 rev64 v19.16b, v19.16b st1 { v19.16b }, [x3] mov x0, x9 ldp d10, d11, [sp, #16] ldp d12, d13, [sp, #32] ldp d14, d15, [sp, #48] ldp d8, d9, [sp], #80 ret L256_dec_ret: mov w0, #0x0 ret .byte 65,69,83,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,65,82,77,118,56,44,32,83,80,68,88,32,66,83,68,45,51,45,67,108,97,117,115,101,32,98,121,32,60,120,105,97,111,107,97,110,103,46,113,105,97,110,64,97,114,109,46,99,111,109,62,0 .align 2 .align 2 #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
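A note on the GHASH tail above: the L256_dec_blocks_* paths fold the last one to eight ciphertext blocks into the authentication tag. Each remaining block is byte-reversed (rev64), XORed with the running partial tag, multiplied by the matching power of H (loaded from [x6]) with pmull/pmull2 into separate low/mid/high Karatsuba accumulators (v19/v18/v17), and the MODULO section at the end reduces the 256-bit accumulator back to 128 bits using the constant loaded from [x10]. The sketch below is only a minimal big-integer reference model of the GHASH function this computes, following the NIST SP 800-38D definition; it does not reproduce the Karatsuba split, the byte-reflected register layout, or the partial-last-block masking, and the names gf128_mul and ghash are illustrative, not symbols from aws-lc.

# Minimal GHASH reference model (assumes 16-byte, already padded blocks).
def gf128_mul(x, y):
    # GF(2^128) multiply in GCM's bit convention (NIST SP 800-38D, Algorithm 1).
    R = 0xE1 << 120                          # reduction polynomial x^128+x^7+x^2+x+1, reflected
    z, v = 0, y
    for i in range(128):
        if (x >> (127 - i)) & 1:             # walk x from its most significant bit
            z ^= v
        v = (v >> 1) ^ R if v & 1 else v >> 1
    return z

def ghash(h, blocks):
    # blocks: iterable of 16-byte chunks; returns the 128-bit tag accumulator.
    y = 0
    for block in blocks:
        y = gf128_mul(y ^ int.from_bytes(block, "big"), h)
    return y

The assembly arrives at the same value but keeps the per-block products unreduced in the three accumulators and performs a single polynomial fold at the end, which is what the "karatsuba tidy up" and "fold into mid/low" comments describe.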
marvin-hansen/iggy-streaming-system
7,482
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include "openssl/arm_arch.h" .text .globl _beeu_mod_inverse_vartime .private_extern _beeu_mod_inverse_vartime .align 4 _beeu_mod_inverse_vartime: // Reserve enough space for 14 8-byte registers on the stack // in the first stp call for x29, x30. // Then store the remaining callee-saved registers. // // | x29 | x30 | x19 | x20 | ... | x27 | x28 | x0 | x2 | // ^ ^ // sp <------------------- 112 bytes ----------------> old sp // x29 (FP) // AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-112]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] stp x0,x2,[sp,#96] // B = b3..b0 := a ldp x25,x26,[x1] ldp x27,x28,[x1,#16] // n3..n0 := n // Note: the value of input params are changed in the following. ldp x0,x1,[x2] ldp x2,x30,[x2,#16] // A = a3..a0 := n mov x21, x0 mov x22, x1 mov x23, x2 mov x24, x30 // X = x4..x0 := 1 mov x3, #1 eor x4, x4, x4 eor x5, x5, x5 eor x6, x6, x6 eor x7, x7, x7 // Y = y4..y0 := 0 eor x8, x8, x8 eor x9, x9, x9 eor x10, x10, x10 eor x11, x11, x11 eor x12, x12, x12 Lbeeu_loop: // if B == 0, jump to .Lbeeu_loop_end orr x14, x25, x26 orr x14, x14, x27 // reverse the bit order of x25. This is needed for clz after this macro rbit x15, x25 orr x14, x14, x28 cbz x14,Lbeeu_loop_end // 0 < B < |n|, // 0 < A <= |n|, // (1) X*a == B (mod |n|), // (2) (-1)*Y*a == A (mod |n|) // Now divide B by the maximum possible power of two in the // integers, and divide X by the same value mod |n|. // When we're done, (1) still holds. // shift := number of trailing 0s in x25 // ( = number of leading 0s in x15; see the "rbit" instruction in TEST_B_ZERO) clz x13, x15 // If there is no shift, goto shift_A_Y cbz x13, Lbeeu_shift_A_Y // Shift B right by "x13" bits neg x14, x13 lsr x25, x25, x13 lsl x15, x26, x14 lsr x26, x26, x13 lsl x19, x27, x14 orr x25, x25, x15 lsr x27, x27, x13 lsl x20, x28, x14 orr x26, x26, x19 lsr x28, x28, x13 orr x27, x27, x20 // Shift X right by "x13" bits, adding n whenever X becomes odd. // x13--; // x14 := 0; needed in the addition to the most significant word in SHIFT1 eor x14, x14, x14 Lbeeu_shift_loop_X: tbz x3, #0, Lshift1_0 adds x3, x3, x0 adcs x4, x4, x1 adcs x5, x5, x2 adcs x6, x6, x30 adc x7, x7, x14 Lshift1_0: // var0 := [var1|var0]<64..1>; // i.e. concatenate var1 and var0, // extract bits <64..1> from the resulting 128-bit value // and put them in var0 extr x3, x4, x3, #1 extr x4, x5, x4, #1 extr x5, x6, x5, #1 extr x6, x7, x6, #1 lsr x7, x7, #1 subs x13, x13, #1 bne Lbeeu_shift_loop_X // Note: the steps above perform the same sequence as in p256_beeu-x86_64-asm.pl // with the following differences: // - "x13" is set directly to the number of trailing 0s in B // (using rbit and clz instructions) // - The loop is only used to call SHIFT1(X) // and x13 is decreased while executing the X loop. // - SHIFT256(B, x13) is performed before right-shifting X; they are independent Lbeeu_shift_A_Y: // Same for A and Y. // Afterwards, (2) still holds. 
// Reverse the bit order of x21 // x13 := number of trailing 0s in x21 (= number of leading 0s in x15) rbit x15, x21 clz x13, x15 // If there is no shift, goto |B-A|, X+Y update cbz x13, Lbeeu_update_B_X_or_A_Y // Shift A right by "x13" bits neg x14, x13 lsr x21, x21, x13 lsl x15, x22, x14 lsr x22, x22, x13 lsl x19, x23, x14 orr x21, x21, x15 lsr x23, x23, x13 lsl x20, x24, x14 orr x22, x22, x19 lsr x24, x24, x13 orr x23, x23, x20 // Shift Y right by "x13" bits, adding n whenever Y becomes odd. // x13--; // x14 := 0; needed in the addition to the most significant word in SHIFT1 eor x14, x14, x14 Lbeeu_shift_loop_Y: tbz x8, #0, Lshift1_1 adds x8, x8, x0 adcs x9, x9, x1 adcs x10, x10, x2 adcs x11, x11, x30 adc x12, x12, x14 Lshift1_1: // var0 := [var1|var0]<64..1>; // i.e. concatenate var1 and var0, // extract bits <64..1> from the resulting 128-bit value // and put them in var0 extr x8, x9, x8, #1 extr x9, x10, x9, #1 extr x10, x11, x10, #1 extr x11, x12, x11, #1 lsr x12, x12, #1 subs x13, x13, #1 bne Lbeeu_shift_loop_Y Lbeeu_update_B_X_or_A_Y: // Try T := B - A; if cs, continue with B > A (cs: carry set = no borrow) // Note: this is a case of unsigned arithmetic, where T fits in 4 64-bit words // without taking a sign bit if generated. The lack of a carry would // indicate a negative result. See, for example, // https://community.arm.com/developer/ip-products/processors/b/processors-ip-blog/posts/condition-codes-1-condition-flags-and-codes subs x14, x25, x21 sbcs x15, x26, x22 sbcs x19, x27, x23 sbcs x20, x28, x24 bcs Lbeeu_B_greater_than_A // Else A > B => // A := A - B; Y := Y + X; goto beginning of the loop subs x21, x21, x25 sbcs x22, x22, x26 sbcs x23, x23, x27 sbcs x24, x24, x28 adds x8, x8, x3 adcs x9, x9, x4 adcs x10, x10, x5 adcs x11, x11, x6 adc x12, x12, x7 b Lbeeu_loop Lbeeu_B_greater_than_A: // Continue with B > A => // B := B - A; X := X + Y; goto beginning of the loop mov x25, x14 mov x26, x15 mov x27, x19 mov x28, x20 adds x3, x3, x8 adcs x4, x4, x9 adcs x5, x5, x10 adcs x6, x6, x11 adc x7, x7, x12 b Lbeeu_loop Lbeeu_loop_end: // The Euclid's algorithm loop ends when A == gcd(a,n); // this would be 1, when a and n are co-prime (i.e. do not have a common factor). // Since (-1)*Y*a == A (mod |n|), Y>0 // then out = -Y mod n // Verify that A = 1 ==> (-1)*Y*a = A = 1 (mod |n|) // Is A-1 == 0? // If not, fail. 
sub x14, x21, #1 orr x14, x14, x22 orr x14, x14, x23 orr x14, x14, x24 cbnz x14, Lbeeu_err // If Y>n ==> Y:=Y-n Lbeeu_reduction_loop: // x_i := y_i - n_i (X is no longer needed, use it as temp) // (x14 = 0 from above) subs x3, x8, x0 sbcs x4, x9, x1 sbcs x5, x10, x2 sbcs x6, x11, x30 sbcs x7, x12, x14 // If result is non-negative (i.e., cs = carry set = no borrow), // y_i := x_i; goto reduce again // else // y_i := y_i; continue csel x8, x3, x8, cs csel x9, x4, x9, cs csel x10, x5, x10, cs csel x11, x6, x11, cs csel x12, x7, x12, cs bcs Lbeeu_reduction_loop // Now Y < n (Y cannot be equal to n, since the inverse cannot be 0) // out = -Y = n-Y subs x8, x0, x8 sbcs x9, x1, x9 sbcs x10, x2, x10 sbcs x11, x30, x11 // Save Y in output (out (x0) was saved on the stack) ldr x3, [sp,#96] stp x8, x9, [x3] stp x10, x11, [x3,#16] // return 1 (success) mov x0, #1 b Lbeeu_finish Lbeeu_err: // return 0 (error) eor x0, x0, x0 Lbeeu_finish: // Restore callee-saved registers, except x0, x2 add sp,x29,#0 ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldp x25,x26,[sp,#64] ldp x27,x28,[sp,#80] ldp x29,x30,[sp],#112 AARCH64_VALIDATE_LINK_REGISTER ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
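The comments in _beeu_mod_inverse_vartime above spell out the loop invariants, (1) X*a == B (mod n) and (2) (-1)*Y*a == A (mod n), of the binary extended Euclidean algorithm. Below is a minimal integer-level Python model of the same flow, assuming n is odd and 0 < a < n; it is intended only to make the register-level steps easier to follow. The function name mirrors the assembly symbol but the code is illustrative, not part of aws-lc.

def beeu_mod_inverse_vartime(a, n):
    # Variable-time binary extended Euclid: returns a^-1 mod n, or None if gcd(a, n) != 1.
    A, B, X, Y = n, a, 1, 0
    # Invariants maintained by every step:
    #   (1)  X*a ==  B (mod n)
    #   (2) -Y*a ==  A (mod n)
    while B != 0:
        while B % 2 == 0:                     # SHIFT256(B) plus the SHIFT1(X) loop
            B //= 2
            X = (X + n) // 2 if X % 2 else X // 2
        while A % 2 == 0:                     # the same treatment for A and Y
            A //= 2
            Y = (Y + n) // 2 if Y % 2 else Y // 2
        if B >= A:                            # B := B - A; X := X + Y
            B, X = B - A, X + Y
        else:                                 # A := A - B; Y := Y + X
            A, Y = A - B, Y + X
    if A != 1:                                # gcd(a, n) != 1: the Lbeeu_err path
        return None
    return (-Y) % n                           # out = -Y mod n, by invariant (2)

When it succeeds, (beeu_mod_inverse_vartime(a, n) * a) % n == 1. As the _vartime suffix signals, execution time depends on the input values, since the branch pattern follows the bits of a and n.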
marvin-hansen/iggy-streaming-system
37,549
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/p256-armv8-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include "openssl/arm_arch.h" .section __TEXT,__const .align 5 Lpoly: .quad 0xffffffffffffffff,0x00000000ffffffff,0x0000000000000000,0xffffffff00000001 LRR: // 2^512 mod P precomputed for NIST P256 polynomial .quad 0x0000000000000003,0xfffffffbffffffff,0xfffffffffffffffe,0x00000004fffffffd Lone_mont: .quad 0x0000000000000001,0xffffffff00000000,0xffffffffffffffff,0x00000000fffffffe Lone: .quad 1,0,0,0 Lord: .quad 0xf3b9cac2fc632551,0xbce6faada7179e84,0xffffffffffffffff,0xffffffff00000000 LordK: .quad 0xccd1c8aaee00bc4f .byte 69,67,80,95,78,73,83,84,90,50,53,54,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .text // void ecp_nistz256_mul_mont(BN_ULONG x0[4],const BN_ULONG x1[4], // const BN_ULONG x2[4]); .globl _ecp_nistz256_mul_mont .private_extern _ecp_nistz256_mul_mont .align 4 _ecp_nistz256_mul_mont: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-32]! add x29,sp,#0 stp x19,x20,[sp,#16] ldr x3,[x2] // bp[0] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_mul_mont ldp x19,x20,[sp,#16] ldp x29,x30,[sp],#32 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_sqr_mont(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl _ecp_nistz256_sqr_mont .private_extern _ecp_nistz256_sqr_mont .align 4 _ecp_nistz256_sqr_mont: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-32]! add x29,sp,#0 stp x19,x20,[sp,#16] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sqr_mont ldp x19,x20,[sp,#16] ldp x29,x30,[sp],#32 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_div_by_2(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl _ecp_nistz256_div_by_2 .private_extern _ecp_nistz256_div_by_2 .align 4 _ecp_nistz256_div_by_2: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_div_by_2 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_mul_by_2(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl _ecp_nistz256_mul_by_2 .private_extern _ecp_nistz256_mul_by_2 .align 4 _ecp_nistz256_mul_by_2: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 bl __ecp_nistz256_add_to // ret = a+a // 2*a ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_mul_by_3(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl _ecp_nistz256_mul_by_3 .private_extern _ecp_nistz256_mul_by_3 .align 4 _ecp_nistz256_mul_by_3: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 mov x4,x14 mov x5,x15 mov x6,x16 mov x7,x17 bl __ecp_nistz256_add_to // ret = a+a // 2*a mov x8,x4 mov x9,x5 mov x10,x6 mov x11,x7 bl __ecp_nistz256_add_to // ret += a // 2*a+a=3*a ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_sub(BN_ULONG x0[4],const BN_ULONG x1[4], // const BN_ULONG x2[4]); .globl _ecp_nistz256_sub .private_extern _ecp_nistz256_sub .align 4 _ecp_nistz256_sub: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ldp x14,x15,[x1] ldp x16,x17,[x1,#16] adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sub_from ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // void ecp_nistz256_neg(BN_ULONG x0[4],const BN_ULONG x1[4]); .globl _ecp_nistz256_neg .private_extern _ecp_nistz256_neg .align 4 _ecp_nistz256_neg: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x2,x1 mov x14,xzr // a = 0 mov x15,xzr mov x16,xzr mov x17,xzr adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] bl __ecp_nistz256_sub_from ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // note that __ecp_nistz256_mul_mont expects a[0-3] input pre-loaded // to x4-x7 and b[0] - to x3 .align 4 __ecp_nistz256_mul_mont: mul x14,x4,x3 // a[0]*b[0] umulh x8,x4,x3 mul x15,x5,x3 // a[1]*b[0] umulh x9,x5,x3 mul x16,x6,x3 // a[2]*b[0] umulh x10,x6,x3 mul x17,x7,x3 // a[3]*b[0] umulh x11,x7,x3 ldr x3,[x2,#8] // b[1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adc x19,xzr,x11 mov x20,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr ldr x3,[x2,#8*(1+1)] // b[1+1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr ldr x3,[x2,#8*(2+1)] // b[2+1] adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] mul x8,x4,x3 // lo(a[0]*b[i]) adcs x15,x16,x9 mul x9,x5,x3 // lo(a[1]*b[i]) adcs x16,x17,x10 // +=acc[0]*0xffff0001 mul x10,x6,x3 // lo(a[2]*b[i]) adcs x17,x19,x11 mul x11,x7,x3 // 
lo(a[3]*b[i]) adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts of multiplication umulh x8,x4,x3 // hi(a[0]*b[i]) adcs x15,x15,x9 umulh x9,x5,x3 // hi(a[1]*b[i]) adcs x16,x16,x10 umulh x10,x6,x3 // hi(a[2]*b[i]) adcs x17,x17,x11 umulh x11,x7,x3 // hi(a[3]*b[i]) adc x19,x19,xzr adds x15,x15,x8 // accumulate high parts of multiplication lsl x8,x14,#32 adcs x16,x16,x9 lsr x9,x14,#32 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr // last reduction subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 adcs x16,x17,x10 // +=acc[0]*0xffff0001 adcs x17,x19,x11 adc x19,x20,xzr adds x8,x14,#1 // subs x8,x14,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x19,xzr // did it borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret // note that __ecp_nistz256_sqr_mont expects a[0-3] input pre-loaded // to x4-x7 .align 4 __ecp_nistz256_sqr_mont: // | | | | | |a1*a0| | // | | | | |a2*a0| | | // | |a3*a2|a3*a0| | | | // | | | |a2*a1| | | | // | | |a3*a1| | | | | // *| | | | | | | | 2| // +|a3*a3|a2*a2|a1*a1|a0*a0| // |--+--+--+--+--+--+--+--| // |A7|A6|A5|A4|A3|A2|A1|A0|, where Ax is , i.e. follow // // "can't overflow" below mark carrying into high part of // multiplication result, which can't overflow, because it // can never be all ones. mul x15,x5,x4 // a[1]*a[0] umulh x9,x5,x4 mul x16,x6,x4 // a[2]*a[0] umulh x10,x6,x4 mul x17,x7,x4 // a[3]*a[0] umulh x19,x7,x4 adds x16,x16,x9 // accumulate high parts of multiplication mul x8,x6,x5 // a[2]*a[1] umulh x9,x6,x5 adcs x17,x17,x10 mul x10,x7,x5 // a[3]*a[1] umulh x11,x7,x5 adc x19,x19,xzr // can't overflow mul x20,x7,x6 // a[3]*a[2] umulh x1,x7,x6 adds x9,x9,x10 // accumulate high parts of multiplication mul x14,x4,x4 // a[0]*a[0] adc x10,x11,xzr // can't overflow adds x17,x17,x8 // accumulate low parts of multiplication umulh x4,x4,x4 adcs x19,x19,x9 mul x9,x5,x5 // a[1]*a[1] adcs x20,x20,x10 umulh x5,x5,x5 adc x1,x1,xzr // can't overflow adds x15,x15,x15 // acc[1-6]*=2 mul x10,x6,x6 // a[2]*a[2] adcs x16,x16,x16 umulh x6,x6,x6 adcs x17,x17,x17 mul x11,x7,x7 // a[3]*a[3] adcs x19,x19,x19 umulh x7,x7,x7 adcs x20,x20,x20 adcs x1,x1,x1 adc x2,xzr,xzr adds x15,x15,x4 // +a[i]*a[i] adcs x16,x16,x9 adcs x17,x17,x5 adcs x19,x19,x10 adcs x20,x20,x6 lsl x8,x14,#32 adcs x1,x1,x11 lsr x9,x14,#32 adc x2,x2,x7 subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 lsl x8,x14,#32 adcs x16,x17,x10 // +=acc[0]*0xffff0001 lsr x9,x14,#32 adc x17,x11,xzr // can't overflow subs x10,x14,x8 // "*0xffff0001" sbc x11,x14,x9 adds x14,x15,x8 // +=acc[0]<<96 and omit acc[0] adcs x15,x16,x9 adcs x16,x17,x10 // +=acc[0]*0xffff0001 adc x17,x11,xzr // can't overflow adds x14,x14,x19 // accumulate upper half adcs x15,x15,x20 adcs x16,x16,x1 adcs x17,x17,x2 adc x19,xzr,xzr adds x8,x14,#1 // subs x8,x14,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x19,xzr // did it 
borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret // Note that __ecp_nistz256_add_to expects both input vectors pre-loaded to // x4-x7 and x8-x11. This is done because it's used in multiple // contexts, e.g. in multiplication by 2 and 3... .align 4 __ecp_nistz256_add_to: adds x14,x14,x8 // ret = a+b adcs x15,x15,x9 adcs x16,x16,x10 adcs x17,x17,x11 adc x1,xzr,xzr // zap x1 adds x8,x14,#1 // subs x8,x4,#-1 // tmp = ret-modulus sbcs x9,x15,x12 sbcs x10,x16,xzr sbcs x11,x17,x13 sbcs xzr,x1,xzr // did subtraction borrow? csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ret .align 4 __ecp_nistz256_sub_from: ldp x8,x9,[x2] ldp x10,x11,[x2,#16] subs x14,x14,x8 // ret = a-b sbcs x15,x15,x9 sbcs x16,x16,x10 sbcs x17,x17,x11 sbc x1,xzr,xzr // zap x1 subs x8,x14,#1 // adds x8,x4,#-1 // tmp = ret+modulus adcs x9,x15,x12 adcs x10,x16,xzr adc x11,x17,x13 cmp x1,xzr // did subtraction borrow? csel x14,x14,x8,eq // ret = borrow ? ret+modulus : ret csel x15,x15,x9,eq csel x16,x16,x10,eq stp x14,x15,[x0] csel x17,x17,x11,eq stp x16,x17,[x0,#16] ret .align 4 __ecp_nistz256_sub_morf: ldp x8,x9,[x2] ldp x10,x11,[x2,#16] subs x14,x8,x14 // ret = b-a sbcs x15,x9,x15 sbcs x16,x10,x16 sbcs x17,x11,x17 sbc x1,xzr,xzr // zap x1 subs x8,x14,#1 // adds x8,x4,#-1 // tmp = ret+modulus adcs x9,x15,x12 adcs x10,x16,xzr adc x11,x17,x13 cmp x1,xzr // did subtraction borrow? csel x14,x14,x8,eq // ret = borrow ? ret+modulus : ret csel x15,x15,x9,eq csel x16,x16,x10,eq stp x14,x15,[x0] csel x17,x17,x11,eq stp x16,x17,[x0,#16] ret .align 4 __ecp_nistz256_div_by_2: subs x8,x14,#1 // adds x8,x4,#-1 // tmp = a+modulus adcs x9,x15,x12 adcs x10,x16,xzr adcs x11,x17,x13 adc x1,xzr,xzr // zap x1 tst x14,#1 // is a even? csel x14,x14,x8,eq // ret = even ? a : a+modulus csel x15,x15,x9,eq csel x16,x16,x10,eq csel x17,x17,x11,eq csel x1,xzr,x1,eq lsr x14,x14,#1 // ret >>= 1 orr x14,x14,x15,lsl#63 lsr x15,x15,#1 orr x15,x15,x16,lsl#63 lsr x16,x16,#1 orr x16,x16,x17,lsl#63 lsr x17,x17,#1 stp x14,x15,[x0] orr x17,x17,x1,lsl#63 stp x16,x17,[x0,#16] ret .globl _ecp_nistz256_point_double .private_extern _ecp_nistz256_point_double .align 5 _ecp_nistz256_point_double: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] sub sp,sp,#32*4 Ldouble_shortcut: ldp x14,x15,[x1,#32] mov x21,x0 ldp x16,x17,[x1,#48] mov x22,x1 adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] mov x8,x14 ldr x13,[x13,#24] mov x9,x15 ldp x4,x5,[x22,#64] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[x22,#64+16] add x0,sp,#0 bl __ecp_nistz256_add_to // p256_mul_by_2(S, in_y); add x0,sp,#64 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Zsqr, in_z); ldp x8,x9,[x22] ldp x10,x11,[x22,#16] mov x4,x14 // put Zsqr aside for p256_sub mov x5,x15 mov x6,x16 mov x7,x17 add x0,sp,#32 bl __ecp_nistz256_add_to // p256_add(M, Zsqr, in_x); add x2,x22,#0 mov x14,x4 // restore Zsqr mov x15,x5 ldp x4,x5,[sp,#0] // forward load for p256_sqr_mont mov x16,x6 mov x17,x7 ldp x6,x7,[sp,#0+16] add x0,sp,#64 bl __ecp_nistz256_sub_morf // p256_sub(Zsqr, in_x, Zsqr); add x0,sp,#0 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(S, S); ldr x3,[x22,#32] ldp x4,x5,[x22,#64] ldp x6,x7,[x22,#64+16] add x2,x22,#32 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(tmp0, in_z, in_y); mov x8,x14 mov x9,x15 ldp x4,x5,[sp,#0] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[sp,#0+16] add x0,x21,#64 bl __ecp_nistz256_add_to // p256_mul_by_2(res_z, tmp0); add x0,sp,#96 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(tmp0, S); ldr x3,[sp,#64] // forward load for p256_mul_mont ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x0,x21,#32 bl __ecp_nistz256_div_by_2 // p256_div_by_2(res_y, tmp0); add x2,sp,#64 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(M, M, Zsqr); mov x8,x14 // duplicate M mov x9,x15 mov x10,x16 mov x11,x17 mov x4,x14 // put M aside mov x5,x15 mov x6,x16 mov x7,x17 add x0,sp,#32 bl __ecp_nistz256_add_to mov x8,x4 // restore M mov x9,x5 ldr x3,[x22] // forward load for p256_mul_mont mov x10,x6 ldp x4,x5,[sp,#0] mov x11,x7 ldp x6,x7,[sp,#0+16] bl __ecp_nistz256_add_to // p256_mul_by_3(M, M); add x2,x22,#0 add x0,sp,#0 bl __ecp_nistz256_mul_mont // p256_mul_mont(S, S, in_x); mov x8,x14 mov x9,x15 ldp x4,x5,[sp,#32] // forward load for p256_sqr_mont mov x10,x16 mov x11,x17 ldp x6,x7,[sp,#32+16] add x0,sp,#96 bl __ecp_nistz256_add_to // p256_mul_by_2(tmp0, S); add x0,x21,#0 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(res_x, M); add x2,sp,#96 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, tmp0); add x2,sp,#0 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(S, S, res_x); ldr x3,[sp,#32] mov x4,x14 // copy S mov x5,x15 mov x6,x16 mov x7,x17 add x2,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(S, S, M); add x2,x21,#32 add x0,x21,#32 bl __ecp_nistz256_sub_from // p256_sub(res_y, S, res_y); add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .globl _ecp_nistz256_point_add .private_extern _ecp_nistz256_point_add .align 5 _ecp_nistz256_point_add: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#32*12 ldp x4,x5,[x2,#64] // in2_z ldp x6,x7,[x2,#64+16] mov x21,x0 mov x22,x1 mov x23,x2 adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] orr x8,x4,x5 orr x10,x6,x7 orr x25,x8,x10 cmp x25,#0 csetm x25,ne // ~in2infty add x0,sp,#192 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z2sqr, in2_z); ldp x4,x5,[x22,#64] // in1_z ldp x6,x7,[x22,#64+16] orr x8,x4,x5 orr x10,x6,x7 orr x24,x8,x10 cmp x24,#0 csetm x24,ne // ~in1infty add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z); ldr x3,[x23,#64] ldp x4,x5,[sp,#192] ldp x6,x7,[sp,#192+16] add x2,x23,#64 add x0,sp,#320 bl __ecp_nistz256_mul_mont // p256_mul_mont(S1, Z2sqr, in2_z); ldr x3,[x22,#64] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,x22,#64 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, Z1sqr, in1_z); ldr x3,[x22,#32] ldp x4,x5,[sp,#320] ldp x6,x7,[sp,#320+16] add x2,x22,#32 add x0,sp,#320 bl __ecp_nistz256_mul_mont // p256_mul_mont(S1, S1, in1_y); ldr x3,[x23,#32] ldp x4,x5,[sp,#352] ldp x6,x7,[sp,#352+16] add x2,x23,#32 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S2, in2_y); add x2,sp,#320 ldr x3,[sp,#192] // forward load for p256_mul_mont ldp x4,x5,[x22] ldp x6,x7,[x22,#16] add x0,sp,#160 bl __ecp_nistz256_sub_from // p256_sub(R, S2, S1); orr x14,x14,x15 // see if result is zero orr x16,x16,x17 orr x26,x14,x16 // ~is_equal(S1,S2) add x2,sp,#192 add x0,sp,#256 bl __ecp_nistz256_mul_mont // p256_mul_mont(U1, in1_x, Z2sqr); ldr x3,[sp,#128] ldp x4,x5,[x23] ldp x6,x7,[x23,#16] add x2,sp,#128 add x0,sp,#288 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, in2_x, Z1sqr); add x2,sp,#256 ldp x4,x5,[sp,#160] // forward load for p256_sqr_mont ldp x6,x7,[sp,#160+16] add x0,sp,#96 bl __ecp_nistz256_sub_from // p256_sub(H, U2, U1); orr x14,x14,x15 // see if result is zero orr x16,x16,x17 orr x14,x14,x16 // ~is_equal(U1,U2) mvn x27,x24 // -1/0 -> 0/-1 mvn x28,x25 // -1/0 -> 0/-1 orr x14,x14,x27 orr x14,x14,x28 orr x14,x14,x26 cbnz x14,Ladd_proceed // if(~is_equal(U1,U2) | in1infty | in2infty | ~is_equal(S1,S2)) Ladd_double: mov x1,x22 mov x0,x21 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] add sp,sp,#256 // #256 is from #32*(12-4). 
difference in stack frames b Ldouble_shortcut .align 4 Ladd_proceed: add x0,sp,#192 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Rsqr, R); ldr x3,[x22,#64] ldp x4,x5,[sp,#96] ldp x6,x7,[sp,#96+16] add x2,x22,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, H, in1_z); ldp x4,x5,[sp,#96] ldp x6,x7,[sp,#96+16] add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Hsqr, H); ldr x3,[x23,#64] ldp x4,x5,[sp,#64] ldp x6,x7,[sp,#64+16] add x2,x23,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, res_z, in2_z); ldr x3,[sp,#96] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,sp,#96 add x0,sp,#224 bl __ecp_nistz256_mul_mont // p256_mul_mont(Hcub, Hsqr, H); ldr x3,[sp,#128] ldp x4,x5,[sp,#256] ldp x6,x7,[sp,#256+16] add x2,sp,#128 add x0,sp,#288 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, U1, Hsqr); mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 add x0,sp,#128 bl __ecp_nistz256_add_to // p256_mul_by_2(Hsqr, U2); add x2,sp,#192 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(res_x, Rsqr, Hsqr); add x2,sp,#224 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, Hcub); add x2,sp,#288 ldr x3,[sp,#224] // forward load for p256_mul_mont ldp x4,x5,[sp,#320] ldp x6,x7,[sp,#320+16] add x0,sp,#32 bl __ecp_nistz256_sub_morf // p256_sub(res_y, U2, res_x); add x2,sp,#224 add x0,sp,#352 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S1, Hcub); ldr x3,[sp,#160] ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x2,sp,#160 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_y, res_y, R); add x2,sp,#352 bl __ecp_nistz256_sub_from // p256_sub(res_y, res_y, S2); ldp x4,x5,[sp,#0] // res ldp x6,x7,[sp,#0+16] ldp x8,x9,[x23] // in2 ldp x10,x11,[x23,#16] ldp x14,x15,[x22,#0] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#0+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+0+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+0+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#0+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#0+48] stp x14,x15,[x21,#0] stp x16,x17,[x21,#0+16] ldp x14,x15,[x22,#32] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#32+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+32+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+32+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#32+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#32+48] stp x14,x15,[x21,#32] stp x16,x17,[x21,#32+16] ldp x14,x15,[x22,#64] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#64+16] csel x8,x4,x8,ne csel x9,x5,x9,ne csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? csel x14,x8,x14,ne csel x15,x9,x15,ne csel x16,x10,x16,ne csel x17,x11,x17,ne stp x14,x15,[x21,#64] stp x16,x17,[x21,#64+16] Ladd_done: add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .globl _ecp_nistz256_point_add_affine .private_extern _ecp_nistz256_point_add_affine .align 5 _ecp_nistz256_point_add_affine: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] sub sp,sp,#32*10 mov x21,x0 mov x22,x1 mov x23,x2 adrp x13,Lpoly@PAGE add x13,x13,Lpoly@PAGEOFF ldr x12,[x13,#8] ldr x13,[x13,#24] ldp x4,x5,[x1,#64] // in1_z ldp x6,x7,[x1,#64+16] orr x8,x4,x5 orr x10,x6,x7 orr x24,x8,x10 cmp x24,#0 csetm x24,ne // ~in1infty ldp x14,x15,[x2] // in2_x ldp x16,x17,[x2,#16] ldp x8,x9,[x2,#32] // in2_y ldp x10,x11,[x2,#48] orr x14,x14,x15 orr x16,x16,x17 orr x8,x8,x9 orr x10,x10,x11 orr x14,x14,x16 orr x8,x8,x10 orr x25,x14,x8 cmp x25,#0 csetm x25,ne // ~in2infty add x0,sp,#128 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z); mov x4,x14 mov x5,x15 mov x6,x16 mov x7,x17 ldr x3,[x23] add x2,x23,#0 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, Z1sqr, in2_x); add x2,x22,#0 ldr x3,[x22,#64] // forward load for p256_mul_mont ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x0,sp,#160 bl __ecp_nistz256_sub_from // p256_sub(H, U2, in1_x); add x2,x22,#64 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, Z1sqr, in1_z); ldr x3,[x22,#64] ldp x4,x5,[sp,#160] ldp x6,x7,[sp,#160+16] add x2,x22,#64 add x0,sp,#64 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_z, H, in1_z); ldr x3,[x23,#32] ldp x4,x5,[sp,#128] ldp x6,x7,[sp,#128+16] add x2,x23,#32 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, S2, in2_y); add x2,x22,#32 ldp x4,x5,[sp,#160] // forward load for p256_sqr_mont ldp x6,x7,[sp,#160+16] add x0,sp,#192 bl __ecp_nistz256_sub_from // p256_sub(R, S2, in1_y); add x0,sp,#224 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Hsqr, H); ldp x4,x5,[sp,#192] ldp x6,x7,[sp,#192+16] add x0,sp,#288 bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Rsqr, R); ldr x3,[sp,#160] ldp x4,x5,[sp,#224] ldp x6,x7,[sp,#224+16] add x2,sp,#160 add x0,sp,#256 bl __ecp_nistz256_mul_mont // p256_mul_mont(Hcub, Hsqr, H); ldr x3,[x22] ldp x4,x5,[sp,#224] ldp x6,x7,[sp,#224+16] add x2,x22,#0 add x0,sp,#96 bl __ecp_nistz256_mul_mont // p256_mul_mont(U2, in1_x, Hsqr); mov x8,x14 mov x9,x15 mov x10,x16 mov x11,x17 add x0,sp,#224 bl __ecp_nistz256_add_to // p256_mul_by_2(Hsqr, U2); add x2,sp,#288 add x0,sp,#0 bl __ecp_nistz256_sub_morf // p256_sub(res_x, Rsqr, Hsqr); add x2,sp,#256 bl __ecp_nistz256_sub_from // p256_sub(res_x, res_x, Hcub); add x2,sp,#96 ldr x3,[x22,#32] // forward load for p256_mul_mont ldp x4,x5,[sp,#256] ldp x6,x7,[sp,#256+16] add x0,sp,#32 bl __ecp_nistz256_sub_morf // p256_sub(res_y, U2, res_x); add x2,x22,#32 add x0,sp,#128 bl __ecp_nistz256_mul_mont // p256_mul_mont(S2, in1_y, Hcub); ldr x3,[sp,#192] ldp x4,x5,[sp,#32] ldp x6,x7,[sp,#32+16] add x2,sp,#192 add x0,sp,#32 bl __ecp_nistz256_mul_mont // p256_mul_mont(res_y, res_y, R); add x2,sp,#128 bl __ecp_nistz256_sub_from // p256_sub(res_y, res_y, S2); ldp x4,x5,[sp,#0] // res ldp x6,x7,[sp,#0+16] ldp x8,x9,[x23] // in2 ldp x10,x11,[x23,#16] ldp x14,x15,[x22,#0] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#0+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+0+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+0+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#0+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#0+48] stp x14,x15,[x21,#0] stp x16,x17,[x21,#0+16] adrp x23,Lone_mont@PAGE-64 add x23,x23,Lone_mont@PAGEOFF-64 ldp x14,x15,[x22,#32] // in1 cmp x24,#0 // ~, remember? 
ldp x16,x17,[x22,#32+16] csel x8,x4,x8,ne csel x9,x5,x9,ne ldp x4,x5,[sp,#0+32+32] // res csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? ldp x6,x7,[sp,#0+32+48] csel x14,x8,x14,ne csel x15,x9,x15,ne ldp x8,x9,[x23,#32+32] // in2 csel x16,x10,x16,ne csel x17,x11,x17,ne ldp x10,x11,[x23,#32+48] stp x14,x15,[x21,#32] stp x16,x17,[x21,#32+16] ldp x14,x15,[x22,#64] // in1 cmp x24,#0 // ~, remember? ldp x16,x17,[x22,#64+16] csel x8,x4,x8,ne csel x9,x5,x9,ne csel x10,x6,x10,ne csel x11,x7,x11,ne cmp x25,#0 // ~, remember? csel x14,x8,x14,ne csel x15,x9,x15,ne csel x16,x10,x16,ne csel x17,x11,x17,ne stp x14,x15,[x21,#64] stp x16,x17,[x21,#64+16] add sp,x29,#0 // destroy frame ldp x19,x20,[x29,#16] ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x29,x30,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_ord_mul_mont(uint64_t res[4], uint64_t a[4], // uint64_t b[4]); .globl _ecp_nistz256_ord_mul_mont .private_extern _ecp_nistz256_ord_mul_mont .align 4 _ecp_nistz256_ord_mul_mont: AARCH64_VALID_CALL_TARGET // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] adrp x23,Lord@PAGE add x23,x23,Lord@PAGEOFF ldr x3,[x2] // bp[0] ldp x4,x5,[x1] ldp x6,x7,[x1,#16] ldp x12,x13,[x23,#0] ldp x21,x22,[x23,#16] ldr x23,[x23,#32] mul x14,x4,x3 // a[0]*b[0] umulh x8,x4,x3 mul x15,x5,x3 // a[1]*b[0] umulh x9,x5,x3 mul x16,x6,x3 // a[2]*b[0] umulh x10,x6,x3 mul x17,x7,x3 // a[3]*b[0] umulh x19,x7,x3 mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts of multiplication adcs x16,x16,x9 adcs x17,x17,x10 adc x19,x19,xzr mov x20,xzr ldr x3,[x2,#8*1] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr ldr x3,[x2,#8*2] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr ldr x3,[x2,#8*3] // b[i] lsl x8,x24,#32 subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 mul x8,x4,x3 adc x11,x11,xzr mul x9,x5,x3 adds x14,x15,x10 mul x10,x6,x3 adcs x15,x16,x11 mul x11,x7,x3 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr adds x14,x14,x8 // accumulate low parts umulh x8,x4,x3 adcs x15,x15,x9 umulh x9,x5,x3 adcs x16,x16,x10 umulh x10,x6,x3 
adcs x17,x17,x11 umulh x11,x7,x3 adc x19,x19,xzr mul x24,x14,x23 adds x15,x15,x8 // accumulate high parts adcs x16,x16,x9 adcs x17,x17,x10 adcs x19,x19,x11 adc x20,xzr,xzr lsl x8,x24,#32 // last reduction subs x16,x16,x24 lsr x9,x24,#32 sbcs x17,x17,x8 sbcs x19,x19,x9 sbc x20,x20,xzr subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adcs x17,x19,x24 adc x19,x20,xzr subs x8,x14,x12 // ret -= modulus sbcs x9,x15,x13 sbcs x10,x16,x21 sbcs x11,x17,x22 sbcs xzr,x19,xzr csel x14,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x15,x15,x9,lo csel x16,x16,x10,lo stp x14,x15,[x0] csel x17,x17,x11,lo stp x16,x17,[x0,#16] ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldr x29,[sp],#64 ret //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_ord_sqr_mont(uint64_t res[4], uint64_t a[4], // uint64_t rep); .globl _ecp_nistz256_ord_sqr_mont .private_extern _ecp_nistz256_ord_sqr_mont .align 4 _ecp_nistz256_ord_sqr_mont: AARCH64_VALID_CALL_TARGET // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] adrp x23,Lord@PAGE add x23,x23,Lord@PAGEOFF ldp x4,x5,[x1] ldp x6,x7,[x1,#16] ldp x12,x13,[x23,#0] ldp x21,x22,[x23,#16] ldr x23,[x23,#32] b Loop_ord_sqr .align 4 Loop_ord_sqr: sub x2,x2,#1 //////////////////////////////////////////////////////////////// // | | | | | |a1*a0| | // | | | | |a2*a0| | | // | |a3*a2|a3*a0| | | | // | | | |a2*a1| | | | // | | |a3*a1| | | | | // *| | | | | | | | 2| // +|a3*a3|a2*a2|a1*a1|a0*a0| // |--+--+--+--+--+--+--+--| // |A7|A6|A5|A4|A3|A2|A1|A0|, where Ax is , i.e. follow // // "can't overflow" below mark carrying into high part of // multiplication result, which can't overflow, because it // can never be all ones. 
mul x15,x5,x4 // a[1]*a[0] umulh x9,x5,x4 mul x16,x6,x4 // a[2]*a[0] umulh x10,x6,x4 mul x17,x7,x4 // a[3]*a[0] umulh x19,x7,x4 adds x16,x16,x9 // accumulate high parts of multiplication mul x8,x6,x5 // a[2]*a[1] umulh x9,x6,x5 adcs x17,x17,x10 mul x10,x7,x5 // a[3]*a[1] umulh x11,x7,x5 adc x19,x19,xzr // can't overflow mul x20,x7,x6 // a[3]*a[2] umulh x1,x7,x6 adds x9,x9,x10 // accumulate high parts of multiplication mul x14,x4,x4 // a[0]*a[0] adc x10,x11,xzr // can't overflow adds x17,x17,x8 // accumulate low parts of multiplication umulh x4,x4,x4 adcs x19,x19,x9 mul x9,x5,x5 // a[1]*a[1] adcs x20,x20,x10 umulh x5,x5,x5 adc x1,x1,xzr // can't overflow adds x15,x15,x15 // acc[1-6]*=2 mul x10,x6,x6 // a[2]*a[2] adcs x16,x16,x16 umulh x6,x6,x6 adcs x17,x17,x17 mul x11,x7,x7 // a[3]*a[3] adcs x19,x19,x19 umulh x7,x7,x7 adcs x20,x20,x20 adcs x1,x1,x1 adc x3,xzr,xzr adds x15,x15,x4 // +a[i]*a[i] mul x24,x14,x23 adcs x16,x16,x9 adcs x17,x17,x5 adcs x19,x19,x10 adcs x20,x20,x6 adcs x1,x1,x11 adc x3,x3,x7 subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adc x17,xzr,x24 // can't overflow mul x11,x14,x23 lsl x8,x24,#32 subs x15,x15,x24 lsr x9,x24,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x11 mul x10,x13,x11 umulh x24,x13,x11 adcs x10,x10,x9 adc x24,x24,xzr adds x14,x15,x10 adcs x15,x16,x24 adcs x16,x17,x11 adc x17,xzr,x11 // can't overflow mul x24,x14,x23 lsl x8,x11,#32 subs x15,x15,x11 lsr x9,x11,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x24 mul x10,x13,x24 umulh x11,x13,x24 adcs x10,x10,x9 adc x11,x11,xzr adds x14,x15,x10 adcs x15,x16,x11 adcs x16,x17,x24 adc x17,xzr,x24 // can't overflow mul x11,x14,x23 lsl x8,x24,#32 subs x15,x15,x24 lsr x9,x24,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow subs xzr,x14,#1 umulh x9,x12,x11 mul x10,x13,x11 umulh x24,x13,x11 adcs x10,x10,x9 adc x24,x24,xzr adds x14,x15,x10 adcs x15,x16,x24 adcs x16,x17,x11 adc x17,xzr,x11 // can't overflow lsl x8,x11,#32 subs x15,x15,x11 lsr x9,x11,#32 sbcs x16,x16,x8 sbc x17,x17,x9 // can't borrow adds x14,x14,x19 // accumulate upper half adcs x15,x15,x20 adcs x16,x16,x1 adcs x17,x17,x3 adc x19,xzr,xzr subs x8,x14,x12 // ret -= modulus sbcs x9,x15,x13 sbcs x10,x16,x21 sbcs x11,x17,x22 sbcs xzr,x19,xzr csel x4,x14,x8,lo // ret = borrow ? ret : ret-modulus csel x5,x15,x9,lo csel x6,x16,x10,lo csel x7,x17,x11,lo cbnz x2,Loop_ord_sqr stp x4,x5,[x0] stp x6,x7,[x0,#16] ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldr x29,[sp],#64 ret //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_select_w5(uint64_t *val, uint64_t *in_t, int index); .globl _ecp_nistz256_select_w5 .private_extern _ecp_nistz256_select_w5 .align 4 _ecp_nistz256_select_w5: AARCH64_VALID_CALL_TARGET // x10 := x0 // w9 := 0; loop counter and incremented internal index mov x10, x0 mov w9, #0 // [v16-v21] := 0 movi v16.16b, #0 movi v17.16b, #0 movi v18.16b, #0 movi v19.16b, #0 movi v20.16b, #0 movi v21.16b, #0 Lselect_w5_loop: // Loop 16 times. // Increment index (loop counter); tested at the end of the loop add w9, w9, #1 // [v22-v27] := Load a (3*256-bit = 6*128-bit) table entry starting at x1 // and advance x1 to point to the next entry ld1 {v22.2d, v23.2d, v24.2d, v25.2d}, [x1],#64 // x11 := (w9 == w2)? All 1s : All 0s cmp w9, w2 csetm x11, eq // continue loading ... 
ld1 {v26.2d, v27.2d}, [x1],#32 // duplicate mask_64 into Mask (all 0s or all 1s) dup v3.2d, x11 // [v16-v19] := (Mask == all 1s)? [v22-v25] : [v16-v19] // i.e., values in output registers will remain the same if w9 != w2 bit v16.16b, v22.16b, v3.16b bit v17.16b, v23.16b, v3.16b bit v18.16b, v24.16b, v3.16b bit v19.16b, v25.16b, v3.16b bit v20.16b, v26.16b, v3.16b bit v21.16b, v27.16b, v3.16b // If bit #4 is not 0 (i.e. idx_ctr < 16) loop back tbz w9, #4, Lselect_w5_loop // Write [v16-v21] to memory at the output pointer st1 {v16.2d, v17.2d, v18.2d, v19.2d}, [x10],#64 st1 {v20.2d, v21.2d}, [x10] ret //////////////////////////////////////////////////////////////////////// // void ecp_nistz256_select_w7(uint64_t *val, uint64_t *in_t, int index); .globl _ecp_nistz256_select_w7 .private_extern _ecp_nistz256_select_w7 .align 4 _ecp_nistz256_select_w7: AARCH64_VALID_CALL_TARGET // w9 := 0; loop counter and incremented internal index mov w9, #0 // [v16-v21] := 0 movi v16.16b, #0 movi v17.16b, #0 movi v18.16b, #0 movi v19.16b, #0 Lselect_w7_loop: // Loop 64 times. // Increment index (loop counter); tested at the end of the loop add w9, w9, #1 // [v22-v25] := Load a (2*256-bit = 4*128-bit) table entry starting at x1 // and advance x1 to point to the next entry ld1 {v22.2d, v23.2d, v24.2d, v25.2d}, [x1],#64 // x11 := (w9 == w2)? All 1s : All 0s cmp w9, w2 csetm x11, eq // duplicate mask_64 into Mask (all 0s or all 1s) dup v3.2d, x11 // [v16-v19] := (Mask == all 1s)? [v22-v25] : [v16-v19] // i.e., values in output registers will remain the same if w9 != w2 bit v16.16b, v22.16b, v3.16b bit v17.16b, v23.16b, v3.16b bit v18.16b, v24.16b, v3.16b bit v19.16b, v25.16b, v3.16b // If bit #6 is not 0 (i.e. idx_ctr < 64) loop back tbz w9, #6, Lselect_w7_loop // Write [v16-v19] to memory at the output pointer st1 {v16.2d, v17.2d, v18.2d, v19.2d}, [x0] ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
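For orientation on the Montgomery arithmetic above: with R = 2^256, __ecp_nistz256_mul_mont returns a*b*R^-1 mod p for the P-256 prime held at Lpoly, and the repeated "*0xffff0001" / "+=acc[0]<<96 and omit acc[0]" steps are the per-limb reduction specialized to that prime (its low limb is 2^64 - 1, so the per-limb Montgomery factor is 1 and the multiple of p is built from shifts of acc[0]). The sketch below is only a big-integer reference for the value these routines produce, using textbook one-shot REDC rather than the limb-by-limb folding; p256_mont_mul and to_mont are illustrative names, not aws-lc symbols, and Python 3.8+ is assumed for pow(x, -1, m).

P256 = (1 << 256) - (1 << 224) + (1 << 192) + (1 << 96) - 1   # the Lpoly value
R = 1 << 256

def p256_mont_mul(a, b):
    # Reference for ecp_nistz256_mul_mont's result: a * b * R^-1 mod p
    # (inputs assumed already in Montgomery form and < p).
    n_prime = (-pow(P256, -1, R)) % R        # -p^-1 mod R
    t = a * b
    m = (t * n_prime) % R
    u = (t + m * P256) >> 256                # exact division by R
    return u - P256 if u >= P256 else u      # the conditional "ret - modulus" at the end

def to_mont(x):
    # Entering the Montgomery domain amounts to a Montgomery multiplication by
    # RR = 2^512 mod p, the LRR constant in the file above.
    return (x * R) % P256

ecp_nistz256_ord_mul_mont follows the same pattern modulo the group order at Lord, whose low limb is not 2^64 - 1, which is why it needs the precomputed per-limb factor kept at LordK.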
marvin-hansen/iggy-streaming-system
45,148
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/md5-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) .text .globl _md5_block_asm_data_order .private_extern _md5_block_asm_data_order _md5_block_asm_data_order: // Save all callee-saved registers stp x19,x20,[sp,#-80]! stp x21,x22,[sp,#16] stp x23,x24,[sp,#32] stp x25,x26,[sp,#48] stp x27,x28,[sp,#64] ldp w10, w11, [x0, #0] // Load MD5 state->A and state->B ldp w12, w13, [x0, #8] // Load MD5 state->C and state->D .align 5 md5_blocks_loop: eor x17, x12, x13 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) and x16, x17, x11 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) ldp x15, x3, [x1] // Load 4 words of input data0 M[0]/0 eor x14, x16, x13 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0xa478 // Load lower half of constant 0xd76aa478 movk x9, #0xd76a, lsl #16 // Load upper half of constant 0xd76aa478 add w8, w10, w15 // Add dest value add w7, w8, w9 // Add constant 0xd76aa478 add w6, w7, w14 // Add aux function result ror w6, w6, #25 // Rotate left s=7 bits eor x5, x11, x12 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w11, w6 // Add X parameter round 1 A=FF(A, B, C, D, 0xd76aa478, s=7, M[0]) and x8, x5, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x17, x8, x12 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0xb756 // Load lower half of constant 0xe8c7b756 movk x16, #0xe8c7, lsl #16 // Load upper half of constant 0xe8c7b756 lsr x20, x15, #32 // Right shift high input value containing M[1] add w9, w13, w20 // Add dest value add w7, w9, w16 // Add constant 0xe8c7b756 add w14, w7, w17 // Add aux function result ror w14, w14, #20 // Rotate left s=12 bits eor x6, x4, x11 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w5, w4, w14 // Add X parameter round 1 D=FF(D, A, B, C, 0xe8c7b756, s=12, M[1]) and x8, x6, x5 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x8, x11 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0x70db // Load lower half of constant 0x242070db movk x16, #0x2420, lsl #16 // Load upper half of constant 0x242070db add w7, w12, w3 // Add dest value add w17, w7, w16 // Add constant 0x242070db add w14, w17, w9 // Add aux function result ror w14, w14, #15 // Rotate left s=17 bits eor x6, x5, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w5, w14 // Add X parameter round 1 C=FF(C, D, A, B, 0x242070db, s=17, M[2]) and x7, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x16, x7, x4 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0xceee // Load lower half of constant 0xc1bdceee movk x9, #0xc1bd, lsl #16 // Load upper half of constant 0xc1bdceee lsr x21, x3, #32 // Right shift high input value containing M[3] add w14, w11, w21 // Add dest value add w6, w14, w9 // Add constant 0xc1bdceee add w7, w6, w16 // Add aux function result ror w7, w7, #10 // Rotate left s=22 bits eor x17, x8, x5 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w9, w8, w7 // Add X parameter round 1 B=FF(B, C, D, A, 0xc1bdceee, s=22, M[3]) ldp x14, x7, [x1, #16] // Load 4 words of input data0 M[4]/0w and x16, x17, x9 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x16, x5 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x16, #0xfaf // Load lower half of constant 0xf57c0faf movk x16, #0xf57c, lsl #16 // Load upper half of constant 0xf57c0faf add w17, w4, w14 // 
Add dest value add w16, w17, w16 // Add constant 0xf57c0faf add w4, w16, w6 // Add aux function result ror w4, w4, #25 // Rotate left s=7 bits eor x16, x9, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w9, w4 // Add X parameter round 1 A=FF(A, B, C, D, 0xf57c0faf, s=7, M[4]) and x16, x16, x17 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x16, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x4, #0xc62a // Load lower half of constant 0x4787c62a movk x4, #0x4787, lsl #16 // Load upper half of constant 0x4787c62a lsr x22, x14, #32 // Right shift high input value containing M[5] add w16, w5, w22 // Add dest value add w16, w16, w4 // Add constant 0x4787c62a add w5, w16, w6 // Add aux function result ror w5, w5, #20 // Rotate left s=12 bits eor x4, x17, x9 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w19, w17, w5 // Add X parameter round 1 D=FF(D, A, B, C, 0x4787c62a, s=12, M[5]) and x6, x4, x19 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x5, x6, x9 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x4, #0x4613 // Load lower half of constant 0xa8304613 movk x4, #0xa830, lsl #16 // Load upper half of constant 0xa8304613 add w6, w8, w7 // Add dest value add w8, w6, w4 // Add constant 0xa8304613 add w4, w8, w5 // Add aux function result ror w4, w4, #15 // Rotate left s=17 bits eor x6, x19, x17 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w19, w4 // Add X parameter round 1 C=FF(C, D, A, B, 0xa8304613, s=17, M[6]) and x5, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x4, x5, x17 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x6, #0x9501 // Load lower half of constant 0xfd469501 movk x6, #0xfd46, lsl #16 // Load upper half of constant 0xfd469501 lsr x23, x7, #32 // Right shift high input value containing M[7] add w9, w9, w23 // Add dest value add w5, w9, w6 // Add constant 0xfd469501 add w9, w5, w4 // Add aux function result ror w9, w9, #10 // Rotate left s=22 bits eor x6, x8, x19 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0xfd469501, s=22, M[7]) ldp x5, x16, [x1, #32] // Load 4 words of input data0 M[8]/0 and x9, x6, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x9, x19 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x9, #0x98d8 // Load lower half of constant 0x698098d8 movk x9, #0x6980, lsl #16 // Load upper half of constant 0x698098d8 add w17, w17, w5 // Add dest value add w9, w17, w9 // Add constant 0x698098d8 add w17, w9, w6 // Add aux function result ror w17, w17, #25 // Rotate left s=7 bits eor x9, x4, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w6, w4, w17 // Add X parameter round 1 A=FF(A, B, C, D, 0x698098d8, s=7, M[8]) and x17, x9, x6 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x17, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x17, #0xf7af // Load lower half of constant 0x8b44f7af movk x17, #0x8b44, lsl #16 // Load upper half of constant 0x8b44f7af lsr x24, x5, #32 // Right shift high input value containing M[9] add w19, w19, w24 // Add dest value add w17, w19, w17 // Add constant 0x8b44f7af add w19, w17, w9 // Add aux function result ror w19, w19, #20 // Rotate left s=12 bits eor x9, x6, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w6, w19 // Add X parameter round 1 D=FF(D, A, B, C, 0x8b44f7af, s=12, M[9]) and x9, x9, x17 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x9, x4 // End 
aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x11, #0x5bb1 // Load lower half of constant 0xffff5bb1 movk x11, #0xffff, lsl #16 // Load upper half of constant 0xffff5bb1 add w8, w8, w16 // Add dest value add w8, w8, w11 // Add constant 0xffff5bb1 add w8, w8, w9 // Add aux function result ror w8, w8, #15 // Rotate left s=17 bits eor x9, x17, x6 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w17, w8 // Add X parameter round 1 C=FF(C, D, A, B, 0xffff5bb1, s=17, M[10]) and x9, x9, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x9, x9, x6 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x11, #0xd7be // Load lower half of constant 0x895cd7be movk x11, #0x895c, lsl #16 // Load upper half of constant 0x895cd7be lsr x25, x16, #32 // Right shift high input value containing M[11] add w4, w4, w25 // Add dest value add w4, w4, w11 // Add constant 0x895cd7be add w9, w4, w9 // Add aux function result ror w9, w9, #10 // Rotate left s=22 bits eor x4, x8, x17 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w9, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0x895cd7be, s=22, M[11]) ldp x11, x12, [x1, #48] // Load 4 words of input data0 M[12]/0 and x4, x4, x9 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x4, x4, x17 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x19, #0x1122 // Load lower half of constant 0x6b901122 movk x19, #0x6b90, lsl #16 // Load upper half of constant 0x6b901122 add w6, w6, w11 // Add dest value add w6, w6, w19 // Add constant 0x6b901122 add w4, w6, w4 // Add aux function result ror w4, w4, #25 // Rotate left s=7 bits eor x6, x9, x8 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w4, w9, w4 // Add X parameter round 1 A=FF(A, B, C, D, 0x6b901122, s=7, M[12]) and x6, x6, x4 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x8 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x19, #0x7193 // Load lower half of constant 0xfd987193 movk x19, #0xfd98, lsl #16 // Load upper half of constant 0xfd987193 lsr x26, x11, #32 // Right shift high input value containing M[13] add w17, w17, w26 // Add dest value add w17, w17, w19 // Add constant 0xfd987193 add w17, w17, w6 // Add aux function result ror w17, w17, #20 // Rotate left s=12 bits eor x6, x4, x9 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w17, w4, w17 // Add X parameter round 1 D=FF(D, A, B, C, 0xfd987193, s=12, M[13]) and x6, x6, x17 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x9 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x13, #0x438e // Load lower half of constant 0xa679438e movk x13, #0xa679, lsl #16 // Load upper half of constant 0xa679438e add w8, w8, w12 // Add dest value add w8, w8, w13 // Add constant 0xa679438e add w8, w8, w6 // Add aux function result ror w8, w8, #15 // Rotate left s=17 bits eor x6, x17, x4 // Begin aux function round 1 F(x,y,z)=(((y^z)&x)^z) add w8, w17, w8 // Add X parameter round 1 C=FF(C, D, A, B, 0xa679438e, s=17, M[14]) and x6, x6, x8 // Continue aux function round 1 F(x,y,z)=(((y^z)&x)^z) eor x6, x6, x4 // End aux function round 1 F(x,y,z)=(((y^z)&x)^z) movz x13, #0x821 // Load lower half of constant 0x49b40821 movk x13, #0x49b4, lsl #16 // Load upper half of constant 0x49b40821 lsr x27, x12, #32 // Right shift high input value containing M[15] add w9, w9, w27 // Add dest value add w9, w9, w13 // Add constant 0x49b40821 add w9, w9, w6 // Add aux function result ror w9, w9, #10 // Rotate left s=22 bits bic x6, x8, x17 // Aux function round 2 
G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 1 B=FF(B, C, D, A, 0x49b40821, s=22, M[15]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x2562 // Load lower half of constant 0xf61e2562 movk x13, #0xf61e, lsl #16 // Load upper half of constant 0xf61e2562 add w4, w4, w20 // Add dest value add w4, w4, w13 // Add constant 0xf61e2562 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xf61e2562, s=5, M[1]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xb340 // Load lower half of constant 0xc040b340 movk x13, #0xc040, lsl #16 // Load upper half of constant 0xc040b340 add w17, w17, w7 // Add dest value add w17, w17, w13 // Add constant 0xc040b340 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xc040b340, s=9, M[6]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x5a51 // Load lower half of constant 0x265e5a51 movk x13, #0x265e, lsl #16 // Load upper half of constant 0x265e5a51 add w8, w8, w25 // Add dest value add w8, w8, w13 // Add constant 0x265e5a51 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0x265e5a51, s=14, M[11]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xc7aa // Load lower half of constant 0xe9b6c7aa movk x13, #0xe9b6, lsl #16 // Load upper half of constant 0xe9b6c7aa add w9, w9, w15 // Add dest value add w9, w9, w13 // Add constant 0xe9b6c7aa add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0xe9b6c7aa, s=20, M[0]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x105d // Load lower half of constant 0xd62f105d movk x13, #0xd62f, lsl #16 // Load upper half of constant 0xd62f105d add w4, w4, w22 // Add dest value add w4, w4, w13 // Add constant 0xd62f105d add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xd62f105d, s=5, M[5]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x1453 // Load lower half of constant 0x2441453 movk x13, #0x244, lsl #16 // Load upper half of constant 0x2441453 add w17, w17, w16 // Add dest value add w17, w17, w13 // Add constant 0x2441453 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0x2441453, s=9, M[10]) and x13, x17, x9 // Aux 
function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xe681 // Load lower half of constant 0xd8a1e681 movk x13, #0xd8a1, lsl #16 // Load upper half of constant 0xd8a1e681 add w8, w8, w27 // Add dest value add w8, w8, w13 // Add constant 0xd8a1e681 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0xd8a1e681, s=14, M[15]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xfbc8 // Load lower half of constant 0xe7d3fbc8 movk x13, #0xe7d3, lsl #16 // Load upper half of constant 0xe7d3fbc8 add w9, w9, w14 // Add dest value add w9, w9, w13 // Add constant 0xe7d3fbc8 add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0xe7d3fbc8, s=20, M[4]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xcde6 // Load lower half of constant 0x21e1cde6 movk x13, #0x21e1, lsl #16 // Load upper half of constant 0x21e1cde6 add w4, w4, w24 // Add dest value add w4, w4, w13 // Add constant 0x21e1cde6 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0x21e1cde6, s=5, M[9]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x7d6 // Load lower half of constant 0xc33707d6 movk x13, #0xc337, lsl #16 // Load upper half of constant 0xc33707d6 add w17, w17, w12 // Add dest value add w17, w17, w13 // Add constant 0xc33707d6 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xc33707d6, s=9, M[14]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xd87 // Load lower half of constant 0xf4d50d87 movk x13, #0xf4d5, lsl #16 // Load upper half of constant 0xf4d50d87 add w8, w8, w21 // Add dest value add w8, w8, w13 // Add constant 0xf4d50d87 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0xf4d50d87, s=14, M[3]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x14ed // Load lower half of constant 0x455a14ed movk x13, #0x455a, lsl #16 // Load upper half of constant 0x455a14ed add w9, w9, w5 // Add dest value add w9, w9, w13 // Add constant 0x455a14ed add w9, w9, w6 // Add aux function result ror w9, w9, #12 // Rotate left s=20 bits bic x6, x8, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0x455a14ed, s=20, M[8]) and x13, x9, x17 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xe905 // Load 
lower half of constant 0xa9e3e905 movk x13, #0xa9e3, lsl #16 // Load upper half of constant 0xa9e3e905 add w4, w4, w26 // Add dest value add w4, w4, w13 // Add constant 0xa9e3e905 add w4, w4, w6 // Add aux function result ror w4, w4, #27 // Rotate left s=5 bits bic x6, x9, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w4, w9, w4 // Add X parameter round 2 A=GG(A, B, C, D, 0xa9e3e905, s=5, M[13]) and x13, x4, x8 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0xa3f8 // Load lower half of constant 0xfcefa3f8 movk x13, #0xfcef, lsl #16 // Load upper half of constant 0xfcefa3f8 add w17, w17, w3 // Add dest value add w17, w17, w13 // Add constant 0xfcefa3f8 add w17, w17, w6 // Add aux function result ror w17, w17, #23 // Rotate left s=9 bits bic x6, x4, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w17, w4, w17 // Add X parameter round 2 D=GG(D, A, B, C, 0xfcefa3f8, s=9, M[2]) and x13, x17, x9 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x2d9 // Load lower half of constant 0x676f02d9 movk x13, #0x676f, lsl #16 // Load upper half of constant 0x676f02d9 add w8, w8, w23 // Add dest value add w8, w8, w13 // Add constant 0x676f02d9 add w8, w8, w6 // Add aux function result ror w8, w8, #18 // Rotate left s=14 bits bic x6, x17, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) add w8, w17, w8 // Add X parameter round 2 C=GG(C, D, A, B, 0x676f02d9, s=14, M[7]) and x13, x8, x4 // Aux function round 2 G(x,y,z)=((x&z)|(~z&y)) orr x6, x6, x13 // End aux function round 2 G(x,y,z)=((x&z)|(~z&y)) movz x13, #0x4c8a // Load lower half of constant 0x8d2a4c8a movk x13, #0x8d2a, lsl #16 // Load upper half of constant 0x8d2a4c8a add w9, w9, w11 // Add dest value add w9, w9, w13 // Add constant 0x8d2a4c8a add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #12 // Rotate left s=20 bits movz x10, #0x3942 // Load lower half of constant 0xfffa3942 add w9, w8, w9 // Add X parameter round 2 B=GG(B, C, D, A, 0x8d2a4c8a, s=20, M[12]) movk x10, #0xfffa, lsl #16 // Load upper half of constant 0xfffa3942 add w4, w4, w22 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xfffa3942 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0xf681 // Load lower half of constant 0x8771f681 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xfffa3942, s=4, M[5]) movk x10, #0x8771, lsl #16 // Load upper half of constant 0x8771f681 add w17, w17, w5 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0x8771f681 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x6122 // Load lower half of constant 0x6d9d6122 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0x8771f681, s=11, M[8]) movk x13, #0x6d9d, lsl #16 // Load upper half of constant 0x6d9d6122 add w8, w8, w25 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0x6d9d6122 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x380c // Load 
lower half of constant 0xfde5380c add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0x6d9d6122, s=16, M[11]) movk x13, #0xfde5, lsl #16 // Load upper half of constant 0xfde5380c add w9, w9, w12 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xfde5380c add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0xea44 // Load lower half of constant 0xa4beea44 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xfde5380c, s=23, M[14]) movk x10, #0xa4be, lsl #16 // Load upper half of constant 0xa4beea44 add w4, w4, w20 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xa4beea44 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0xcfa9 // Load lower half of constant 0x4bdecfa9 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xa4beea44, s=4, M[1]) movk x10, #0x4bde, lsl #16 // Load upper half of constant 0x4bdecfa9 add w17, w17, w14 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0x4bdecfa9 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x4b60 // Load lower half of constant 0xf6bb4b60 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0x4bdecfa9, s=11, M[4]) movk x13, #0xf6bb, lsl #16 // Load upper half of constant 0xf6bb4b60 add w8, w8, w23 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0xf6bb4b60 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0xbc70 // Load lower half of constant 0xbebfbc70 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0xf6bb4b60, s=16, M[7]) movk x13, #0xbebf, lsl #16 // Load upper half of constant 0xbebfbc70 add w9, w9, w16 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xbebfbc70 add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0x7ec6 // Load lower half of constant 0x289b7ec6 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xbebfbc70, s=23, M[10]) movk x10, #0x289b, lsl #16 // Load upper half of constant 0x289b7ec6 add w4, w4, w26 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0x289b7ec6 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0x27fa // Load lower half of constant 0xeaa127fa add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0x289b7ec6, s=4, M[13]) movk x10, #0xeaa1, lsl #16 // Load upper half of constant 0xeaa127fa add w17, w17, w15 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0xeaa127fa add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x3085 // Load lower half of constant 0xd4ef3085 add w17, w4, w17 // Add X parameter 
round 3 D=HH(D, A, B, C, 0xeaa127fa, s=11, M[0]) movk x13, #0xd4ef, lsl #16 // Load upper half of constant 0xd4ef3085 add w8, w8, w21 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0xd4ef3085 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x1d05 // Load lower half of constant 0x4881d05 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0xd4ef3085, s=16, M[3]) movk x13, #0x488, lsl #16 // Load upper half of constant 0x4881d05 add w9, w9, w7 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0x4881d05 add w9, w9, w6 // Add aux function result eor x6, x8, x17 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w9, w9, #9 // Rotate left s=23 bits movz x10, #0xd039 // Load lower half of constant 0xd9d4d039 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0x4881d05, s=23, M[6]) movk x10, #0xd9d4, lsl #16 // Load upper half of constant 0xd9d4d039 add w4, w4, w24 // Add dest value eor x6, x6, x9 // End aux function round 3 H(x,y,z)=(x^y^z) add w4, w4, w10 // Add constant 0xd9d4d039 add w4, w4, w6 // Add aux function result ror w4, w4, #28 // Rotate left s=4 bits eor x6, x9, x8 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x10, #0x99e5 // Load lower half of constant 0xe6db99e5 add w4, w9, w4 // Add X parameter round 3 A=HH(A, B, C, D, 0xd9d4d039, s=4, M[9]) movk x10, #0xe6db, lsl #16 // Load upper half of constant 0xe6db99e5 add w17, w17, w11 // Add dest value eor x6, x6, x4 // End aux function round 3 H(x,y,z)=(x^y^z) add w17, w17, w10 // Add constant 0xe6db99e5 add w17, w17, w6 // Add aux function result eor x6, x4, x9 // Begin aux function round 3 H(x,y,z)=(x^y^z) ror w17, w17, #21 // Rotate left s=11 bits movz x13, #0x7cf8 // Load lower half of constant 0x1fa27cf8 add w17, w4, w17 // Add X parameter round 3 D=HH(D, A, B, C, 0xe6db99e5, s=11, M[12]) movk x13, #0x1fa2, lsl #16 // Load upper half of constant 0x1fa27cf8 add w8, w8, w27 // Add dest value eor x6, x6, x17 // End aux function round 3 H(x,y,z)=(x^y^z) add w8, w8, w13 // Add constant 0x1fa27cf8 add w8, w8, w6 // Add aux function result ror w8, w8, #16 // Rotate left s=16 bits eor x6, x17, x4 // Begin aux function round 3 H(x,y,z)=(x^y^z) movz x13, #0x5665 // Load lower half of constant 0xc4ac5665 add w8, w17, w8 // Add X parameter round 3 C=HH(C, D, A, B, 0x1fa27cf8, s=16, M[15]) movk x13, #0xc4ac, lsl #16 // Load upper half of constant 0xc4ac5665 add w9, w9, w3 // Add dest value eor x6, x6, x8 // End aux function round 3 H(x,y,z)=(x^y^z) add w9, w9, w13 // Add constant 0xc4ac5665 add w9, w9, w6 // Add aux function result ror w9, w9, #9 // Rotate left s=23 bits movz x6, #0x2244 // Load lower half of constant 0xf4292244 movk x6, #0xf429, lsl #16 // Load upper half of constant 0xf4292244 add w9, w8, w9 // Add X parameter round 3 B=HH(B, C, D, A, 0xc4ac5665, s=23, M[2]) add w4, w4, w15 // Add dest value orn x13, x9, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w4, w6 // Add constant 0xf4292244 eor x6, x8, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w4, w6 // Add aux function result ror w4, w4, #26 // Rotate left s=6 bits movz x6, #0xff97 // Load lower half of constant 0x432aff97 movk x6, #0x432a, lsl #16 // Load upper half of constant 0x432aff97 add w4, w9, w4 // Add X parameter round 4 A=II(A, B, C, D, 0xf4292244, s=6, M[0]) orn x10, x4, x8 // Begin aux 
function round 4 I(x,y,z)=((~z|x)^y) add w17, w17, w23 // Add dest value eor x10, x9, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w17, w6 // Add constant 0x432aff97 add w6, w17, w10 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x17, #0x23a7 // Load lower half of constant 0xab9423a7 movk x17, #0xab94, lsl #16 // Load upper half of constant 0xab9423a7 add w6, w4, w6 // Add X parameter round 4 D=II(D, A, B, C, 0x432aff97, s=10, M[7]) add w8, w8, w12 // Add dest value orn x10, x6, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w17 // Add constant 0xab9423a7 eor x17, x4, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w17 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x17, #0xa039 // Load lower half of constant 0xfc93a039 movk x17, #0xfc93, lsl #16 // Load upper half of constant 0xfc93a039 add w8, w6, w8 // Add X parameter round 4 C=II(C, D, A, B, 0xab9423a7, s=15, M[14]) orn x13, x8, x4 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w22 // Add dest value eor x13, x6, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w17 // Add constant 0xfc93a039 add w17, w9, w13 // Add aux function result ror w17, w17, #11 // Rotate left s=21 bits movz x9, #0x59c3 // Load lower half of constant 0x655b59c3 movk x9, #0x655b, lsl #16 // Load upper half of constant 0x655b59c3 add w17, w8, w17 // Add X parameter round 4 B=II(B, C, D, A, 0xfc93a039, s=21, M[5]) add w4, w4, w11 // Add dest value orn x13, x17, x6 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w4, w9 // Add constant 0x655b59c3 eor x4, x8, x13 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w9, w4 // Add aux function result ror w9, w9, #26 // Rotate left s=6 bits movz x4, #0xcc92 // Load lower half of constant 0x8f0ccc92 movk x4, #0x8f0c, lsl #16 // Load upper half of constant 0x8f0ccc92 add w9, w17, w9 // Add X parameter round 4 A=II(A, B, C, D, 0x655b59c3, s=6, M[12]) orn x10, x9, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w6, w21 // Add dest value eor x10, x17, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w6, w4 // Add constant 0x8f0ccc92 add w6, w4, w10 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x4, #0xf47d // Load lower half of constant 0xffeff47d movk x4, #0xffef, lsl #16 // Load upper half of constant 0xffeff47d add w6, w9, w6 // Add X parameter round 4 D=II(D, A, B, C, 0x8f0ccc92, s=10, M[3]) add w8, w8, w16 // Add dest value orn x10, x6, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w4 // Add constant 0xffeff47d eor x4, x9, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w8, w4 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x4, #0x5dd1 // Load lower half of constant 0x85845dd1 movk x4, #0x8584, lsl #16 // Load upper half of constant 0x85845dd1 add w8, w6, w8 // Add X parameter round 4 C=II(C, D, A, B, 0xffeff47d, s=15, M[10]) orn x10, x8, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w17, w20 // Add dest value eor x17, x6, x10 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w15, w4 // Add constant 0x85845dd1 add w4, w15, w17 // Add aux function result ror w4, w4, #11 // Rotate left s=21 bits movz x15, #0x7e4f // Load lower half of constant 0x6fa87e4f movk x15, #0x6fa8, lsl #16 // Load upper half of constant 0x6fa87e4f add w17, w8, w4 // Add X parameter round 4 B=II(B, C, D, A, 0x85845dd1, s=21, M[1]) add w4, w9, w5 // Add dest value orn x9, x17, x6 // 
Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w4, w15 // Add constant 0x6fa87e4f eor x4, x8, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w15, w4 // Add aux function result ror w9, w9, #26 // Rotate left s=6 bits movz x15, #0xe6e0 // Load lower half of constant 0xfe2ce6e0 movk x15, #0xfe2c, lsl #16 // Load upper half of constant 0xfe2ce6e0 add w4, w17, w9 // Add X parameter round 4 A=II(A, B, C, D, 0x6fa87e4f, s=6, M[8]) orn x9, x4, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w6, w27 // Add dest value eor x9, x17, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w6, w15 // Add constant 0xfe2ce6e0 add w6, w15, w9 // Add aux function result ror w6, w6, #22 // Rotate left s=10 bits movz x9, #0x4314 // Load lower half of constant 0xa3014314 movk x9, #0xa301, lsl #16 // Load upper half of constant 0xa3014314 add w15, w4, w6 // Add X parameter round 4 D=II(D, A, B, C, 0xfe2ce6e0, s=10, M[15]) add w6, w8, w7 // Add dest value orn x7, x15, x17 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w6, w9 // Add constant 0xa3014314 eor x9, x4, x7 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w8, w9 // Add aux function result ror w6, w6, #17 // Rotate left s=15 bits movz x7, #0x11a1 // Load lower half of constant 0x4e0811a1 movk x7, #0x4e08, lsl #16 // Load upper half of constant 0x4e0811a1 add w8, w15, w6 // Add X parameter round 4 C=II(C, D, A, B, 0xa3014314, s=15, M[6]) orn x9, x8, x4 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w6, w17, w26 // Add dest value eor x17, x15, x9 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w9, w6, w7 // Add constant 0x4e0811a1 add w7, w9, w17 // Add aux function result ror w7, w7, #11 // Rotate left s=21 bits movz x6, #0x7e82 // Load lower half of constant 0xf7537e82 movk x6, #0xf753, lsl #16 // Load upper half of constant 0xf7537e82 add w9, w8, w7 // Add X parameter round 4 B=II(B, C, D, A, 0x4e0811a1, s=21, M[13]) add w17, w4, w14 // Add dest value orn x7, x9, x15 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w14, w17, w6 // Add constant 0xf7537e82 eor x4, x8, x7 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w14, w4 // Add aux function result ror w17, w17, #26 // Rotate left s=6 bits movz x6, #0xf235 // Load lower half of constant 0xbd3af235 movk x6, #0xbd3a, lsl #16 // Load upper half of constant 0xbd3af235 add w7, w9, w17 // Add X parameter round 4 A=II(A, B, C, D, 0xf7537e82, s=6, M[4]) orn x14, x7, x8 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w4, w15, w25 // Add dest value eor x17, x9, x14 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w4, w6 // Add constant 0xbd3af235 add w16, w15, w17 // Add aux function result ror w16, w16, #22 // Rotate left s=10 bits movz x14, #0xd2bb // Load lower half of constant 0x2ad7d2bb movk x14, #0x2ad7, lsl #16 // Load upper half of constant 0x2ad7d2bb add w4, w7, w16 // Add X parameter round 4 D=II(D, A, B, C, 0xbd3af235, s=10, M[11]) add w6, w8, w3 // Add dest value orn x15, x4, x9 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w17, w6, w14 // Add constant 0x2ad7d2bb eor x16, x7, x15 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w8, w17, w16 // Add aux function result ror w8, w8, #17 // Rotate left s=15 bits movz x3, #0xd391 // Load lower half of constant 0xeb86d391 movk x3, #0xeb86, lsl #16 // Load upper half of constant 0xeb86d391 add w14, w4, w8 // Add X parameter round 4 C=II(C, D, A, B, 0x2ad7d2bb, s=15, M[2]) orn x6, x14, x7 // Begin aux function round 4 I(x,y,z)=((~z|x)^y) add w15, w9, w24 
// Add dest value eor x17, x4, x6 // End aux function round 4 I(x,y,z)=((~z|x)^y) add w16, w15, w3 // Add constant 0xeb86d391 add w8, w16, w17 // Add aux function result ror w8, w8, #11 // Rotate left s=21 bits ldp w6, w15, [x0] // Reload MD5 state->A and state->B ldp w5, w9, [x0, #8] // Reload MD5 state->C and state->D add w3, w14, w8 // Add X parameter round 4 B=II(B, C, D, A, 0xeb86d391, s=21, M[9]) add w13, w4, w9 // Add result of MD5 rounds to state->D add w12, w14, w5 // Add result of MD5 rounds to state->C add w10, w7, w6 // Add result of MD5 rounds to state->A add w11, w3, w15 // Add result of MD5 rounds to state->B stp w12, w13, [x0, #8] // Store MD5 states C,D stp w10, w11, [x0] // Store MD5 states A,B add x1, x1, #64 // Increment data pointer subs w2, w2, #1 // Decrement block counter b.ne md5_blocks_loop ldp x21,x22,[sp,#16] ldp x23,x24,[sp,#32] ldp x25,x26,[sp,#48] ldp x27,x28,[sp,#64] ldp x19,x20,[sp],#80 ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
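The generated MD5 block above spells out each auxiliary function as a short bit-logic sequence (eor/and/eor for F, bic/and/orr for G, eor/eor for H, orn/eor for I) and folds every round constant into the dataflow with a movz/movk pair. As a reading aid only, here is a minimal C sketch of the same four auxiliary functions and the generic round step the comments describe; the helper names (rotl32, md5_f, md5_step, ...) and the main() smoke test are illustrative and are not part of aws-lc.

#include <stdint.h>
#include <stdio.h>

/* Left-rotate by s; the assembly gets the same effect with ror by (32 - s). */
static uint32_t rotl32(uint32_t x, unsigned s) { return (x << s) | (x >> (32u - s)); }

/* Round 1: F(x,y,z) = ((y ^ z) & x) ^ z   -- the eor/and/eor sequence above. */
static uint32_t md5_f(uint32_t x, uint32_t y, uint32_t z) { return ((y ^ z) & x) ^ z; }

/* Round 2: G(x,y,z) = (x & z) | (~z & y)  -- the bic/and/orr sequence above. */
static uint32_t md5_g(uint32_t x, uint32_t y, uint32_t z) { return (x & z) | (~z & y); }

/* Round 3: H(x,y,z) = x ^ y ^ z           -- the two-eor sequence above. */
static uint32_t md5_h(uint32_t x, uint32_t y, uint32_t z) { return x ^ y ^ z; }

/* Round 4: I(x,y,z) = (~z | x) ^ y        -- the orn/eor sequence above. */
static uint32_t md5_i(uint32_t x, uint32_t y, uint32_t z) { return (~z | x) ^ y; }

/* One generic step: a = b + rotl(a + aux(b,c,d) + M[k] + K, s). */
static uint32_t md5_step(uint32_t a, uint32_t b, uint32_t c, uint32_t d,
                         uint32_t (*aux)(uint32_t, uint32_t, uint32_t),
                         uint32_t m, uint32_t k, unsigned s)
{
    return b + rotl32(a + aux(b, c, d) + m + k, s);
}

int main(void)
{
    /* Shape check against one step from the listing:
     * A = GG(A, B, C, D, 0xf61e2562, s=5, M[1]), here with a zero message word. */
    uint32_t a = 0x67452301u, b = 0xefcdab89u, c = 0x98badcfeu, d = 0x10325476u;
    printf("%08x\n", md5_step(a, b, c, d, md5_g, 0u, 0xf61e2562u, 5));
    (void)md5_f; (void)md5_h; (void)md5_i; /* keep all four helpers referenced */
    return 0;
}

Unlike this generic sketch, the assembly fully unrolls all 64 steps, keeps the A/B/C/D state in general-purpose w registers throughout, and materializes each per-step constant inline with movz/movk rather than loading it from a table.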
marvin-hansen/iggy-streaming-system
49,157
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/sha512-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) // Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved. // // Licensed under the OpenSSL license (the "License"). You may not use // this file except in compliance with the License. You can obtain a copy // in the file LICENSE in the source distribution or at // https://www.openssl.org/source/license.html // ==================================================================== // Written by Andy Polyakov <appro@openssl.org> for the OpenSSL // project. The module is, however, dual licensed under OpenSSL and // CRYPTOGAMS licenses depending on where you obtain it. For further // details see http://www.openssl.org/~appro/cryptogams/. // // Permission to use under GPLv2 terms is granted. // ==================================================================== // // SHA256/512 for ARMv8. // // Performance in cycles per processed byte and improvement coefficient // over code generated with "default" compiler: // // SHA256-hw SHA256(*) SHA512 // Apple A7 1.97 10.5 (+33%) 6.73 (-1%(**)) // Cortex-A53 2.38 15.5 (+115%) 10.0 (+150%(***)) // Cortex-A57 2.31 11.6 (+86%) 7.51 (+260%(***)) // Denver 2.01 10.5 (+26%) 6.70 (+8%) // X-Gene 20.0 (+100%) 12.8 (+300%(***)) // Mongoose 2.36 13.0 (+50%) 8.36 (+33%) // Kryo 1.92 17.4 (+30%) 11.2 (+8%) // // (*) Software SHA256 results are of lesser relevance, presented // mostly for informational purposes. // (**) The result is a trade-off: it's possible to improve it by // 10% (or by 1 cycle per round), but at the cost of 20% loss // on Cortex-A53 (or by 4 cycles per round). // (***) Super-impressive coefficients over gcc-generated code are // indication of some compiler "pathology", most notably code // generated with -mgeneral-regs-only is significantly faster // and the gap is only 40-90%. #ifndef __KERNEL__ # include <openssl/arm_arch.h> #endif .text .globl _sha512_block_data_order_nohw .private_extern _sha512_block_data_order_nohw .align 6 _sha512_block_data_order_nohw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#4*8 ldp x20,x21,[x0] // load context ldp x22,x23,[x0,#2*8] ldp x24,x25,[x0,#4*8] add x2,x1,x2,lsl#7 // end of input ldp x26,x27,[x0,#6*8] adrp x30,LK512@PAGE add x30,x30,LK512@PAGEOFF stp x0,x2,[x29,#96] Loop: ldp x3,x4,[x1],#2*8 ldr x19,[x30],#8 // *K++ eor x28,x21,x22 // magic seed str x1,[x29,#112] #ifndef __AARCH64EB__ rev x3,x3 // 0 #endif ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x6,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x3 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x4,x4 // 1 #endif ldp x5,x6,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x7,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x4 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x5,x5 // 2 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x8,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 add x25,x25,x5 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x6,x6 // 3 #endif ldp x7,x8,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x9,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x6 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x7,x7 // 4 #endif add x24,x24,x17 // h+=Sigma0(a) ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x10,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x7 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x10,ror#18 // Sigma1(e) ror x10,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x10,x17,ror#34 // Sigma0(a) add x23,x23,x28 // 
h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x8,x8 // 5 #endif ldp x9,x10,[x1],#2*8 add x23,x23,x17 // h+=Sigma0(a) ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x11,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x8 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x11,ror#18 // Sigma1(e) ror x11,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x11,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x9,x9 // 6 #endif add x22,x22,x17 // h+=Sigma0(a) ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x12,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x9 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x12,ror#18 // Sigma1(e) ror x12,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x12,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x10,x10 // 7 #endif ldp x11,x12,[x1],#2*8 add x21,x21,x17 // h+=Sigma0(a) ror x16,x25,#14 add x20,x20,x28 // h+=K[i] eor x13,x25,x25,ror#23 and x17,x26,x25 bic x28,x27,x25 add x20,x20,x10 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x13,ror#18 // Sigma1(e) ror x13,x21,#28 add x20,x20,x17 // h+=Ch(e,f,g) eor x17,x21,x21,ror#5 add x20,x20,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x24,x24,x20 // d+=h eor x19,x19,x22 // Maj(a,b,c) eor x17,x13,x17,ror#34 // Sigma0(a) add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x20,x20,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x11,x11 // 8 #endif add x20,x20,x17 // h+=Sigma0(a) ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x14,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x11 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x14,ror#18 // Sigma1(e) ror x14,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x14,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x12,x12 // 9 #endif ldp x13,x14,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x15,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x12 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x15,ror#18 // Sigma1(e) ror x15,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x15,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x13,x13 // 10 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x0,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 
add x25,x25,x13 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x0,ror#18 // Sigma1(e) ror x0,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x0,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x14,x14 // 11 #endif ldp x15,x0,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x6,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x14 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x15,x15 // 12 #endif add x24,x24,x17 // h+=Sigma0(a) str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x7,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x15 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x0,x0 // 13 #endif ldp x1,x2,[x1] add x23,x23,x17 // h+=Sigma0(a) str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x8,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x0 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x1,x1 // 14 #endif ldr x6,[sp,#24] add x22,x22,x17 // h+=Sigma0(a) str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x9,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x1 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __AARCH64EB__ rev x2,x2 // 15 #endif ldr x7,[sp,#0] add x21,x21,x17 // h+=Sigma0(a) str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor 
x10,x10,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 Loop_16_xx: ldr x8,[sp,#8] str x11,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x10,x5,#1 and x17,x25,x24 ror x9,x2,#19 bic x19,x26,x24 ror x11,x20,#28 add x27,x27,x3 // h+=X[i] eor x16,x16,x24,ror#18 eor x10,x10,x5,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor x11,x11,x20,ror#34 add x27,x27,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x9,x9,x2,ror#61 eor x10,x10,x5,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x11,x20,ror#39 // Sigma0(a) eor x9,x9,x2,lsr#6 // sigma1(X[i+14]) add x4,x4,x13 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x4,x4,x10 add x27,x27,x17 // h+=Sigma0(a) add x4,x4,x9 ldr x9,[sp,#16] str x12,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x11,x6,#1 and x17,x24,x23 ror x10,x3,#19 bic x28,x25,x23 ror x12,x27,#28 add x26,x26,x4 // h+=X[i] eor x16,x16,x23,ror#18 eor x11,x11,x6,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x12,x12,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x10,x10,x3,ror#61 eor x11,x11,x6,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x12,x27,ror#39 // Sigma0(a) eor x10,x10,x3,lsr#6 // sigma1(X[i+14]) add x5,x5,x14 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x5,x5,x11 add x26,x26,x17 // h+=Sigma0(a) add x5,x5,x10 ldr x10,[sp,#24] str x13,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x12,x7,#1 and x17,x23,x22 ror x11,x4,#19 bic x19,x24,x22 ror x13,x26,#28 add x25,x25,x5 // h+=X[i] eor x16,x16,x22,ror#18 eor x12,x12,x7,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x13,x13,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x11,x11,x4,ror#61 eor x12,x12,x7,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x13,x26,ror#39 // Sigma0(a) eor x11,x11,x4,lsr#6 // sigma1(X[i+14]) add x6,x6,x15 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x6,x6,x12 add x25,x25,x17 // h+=Sigma0(a) add x6,x6,x11 ldr x11,[sp,#0] str x14,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x13,x8,#1 and x17,x22,x21 ror x12,x5,#19 bic x28,x23,x21 ror x14,x25,#28 add x24,x24,x6 // h+=X[i] eor x16,x16,x21,ror#18 eor x13,x13,x8,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x14,x14,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x12,x12,x5,ror#61 eor x13,x13,x8,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x14,x25,ror#39 // Sigma0(a) eor x12,x12,x5,lsr#6 // sigma1(X[i+14]) add x7,x7,x0 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next 
round add x7,x7,x13 add x24,x24,x17 // h+=Sigma0(a) add x7,x7,x12 ldr x12,[sp,#8] str x15,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x14,x9,#1 and x17,x21,x20 ror x13,x6,#19 bic x19,x22,x20 ror x15,x24,#28 add x23,x23,x7 // h+=X[i] eor x16,x16,x20,ror#18 eor x14,x14,x9,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x15,x15,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x13,x13,x6,ror#61 eor x14,x14,x9,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x15,x24,ror#39 // Sigma0(a) eor x13,x13,x6,lsr#6 // sigma1(X[i+14]) add x8,x8,x1 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x8,x8,x14 add x23,x23,x17 // h+=Sigma0(a) add x8,x8,x13 ldr x13,[sp,#16] str x0,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x15,x10,#1 and x17,x20,x27 ror x14,x7,#19 bic x28,x21,x27 ror x0,x23,#28 add x22,x22,x8 // h+=X[i] eor x16,x16,x27,ror#18 eor x15,x15,x10,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x0,x0,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x14,x14,x7,ror#61 eor x15,x15,x10,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x0,x23,ror#39 // Sigma0(a) eor x14,x14,x7,lsr#6 // sigma1(X[i+14]) add x9,x9,x2 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x9,x9,x15 add x22,x22,x17 // h+=Sigma0(a) add x9,x9,x14 ldr x14,[sp,#24] str x1,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x0,x11,#1 and x17,x27,x26 ror x15,x8,#19 bic x19,x20,x26 ror x1,x22,#28 add x21,x21,x9 // h+=X[i] eor x16,x16,x26,ror#18 eor x0,x0,x11,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x1,x1,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x15,x15,x8,ror#61 eor x0,x0,x11,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x1,x22,ror#39 // Sigma0(a) eor x15,x15,x8,lsr#6 // sigma1(X[i+14]) add x10,x10,x3 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x10,x10,x0 add x21,x21,x17 // h+=Sigma0(a) add x10,x10,x15 ldr x15,[sp,#0] str x2,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x1,x12,#1 and x17,x26,x25 ror x0,x9,#19 bic x28,x27,x25 ror x2,x21,#28 add x20,x20,x10 // h+=X[i] eor x16,x16,x25,ror#18 eor x1,x1,x12,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x2,x2,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x0,x0,x9,ror#61 eor x1,x1,x12,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x2,x21,ror#39 // Sigma0(a) eor x0,x0,x9,lsr#6 // sigma1(X[i+14]) add x11,x11,x4 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x11,x11,x1 add x20,x20,x17 // h+=Sigma0(a) add x11,x11,x0 ldr x0,[sp,#8] str x3,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x2,x13,#1 and x17,x25,x24 ror x1,x10,#19 bic x19,x26,x24 ror x3,x20,#28 add x27,x27,x11 // h+=X[i] eor x16,x16,x24,ror#18 eor x2,x2,x13,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor 
x3,x3,x20,ror#34 add x27,x27,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x1,x1,x10,ror#61 eor x2,x2,x13,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x3,x20,ror#39 // Sigma0(a) eor x1,x1,x10,lsr#6 // sigma1(X[i+14]) add x12,x12,x5 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x12,x12,x2 add x27,x27,x17 // h+=Sigma0(a) add x12,x12,x1 ldr x1,[sp,#16] str x4,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x3,x14,#1 and x17,x24,x23 ror x2,x11,#19 bic x28,x25,x23 ror x4,x27,#28 add x26,x26,x12 // h+=X[i] eor x16,x16,x23,ror#18 eor x3,x3,x14,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x4,x4,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x2,x2,x11,ror#61 eor x3,x3,x14,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x4,x27,ror#39 // Sigma0(a) eor x2,x2,x11,lsr#6 // sigma1(X[i+14]) add x13,x13,x6 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x13,x13,x3 add x26,x26,x17 // h+=Sigma0(a) add x13,x13,x2 ldr x2,[sp,#24] str x5,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x4,x15,#1 and x17,x23,x22 ror x3,x12,#19 bic x19,x24,x22 ror x5,x26,#28 add x25,x25,x13 // h+=X[i] eor x16,x16,x22,ror#18 eor x4,x4,x15,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x5,x5,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x3,x3,x12,ror#61 eor x4,x4,x15,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x5,x26,ror#39 // Sigma0(a) eor x3,x3,x12,lsr#6 // sigma1(X[i+14]) add x14,x14,x7 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x14,x14,x4 add x25,x25,x17 // h+=Sigma0(a) add x14,x14,x3 ldr x3,[sp,#0] str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x5,x0,#1 and x17,x22,x21 ror x4,x13,#19 bic x28,x23,x21 ror x6,x25,#28 add x24,x24,x14 // h+=X[i] eor x16,x16,x21,ror#18 eor x5,x5,x0,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x6,x6,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x4,x4,x13,ror#61 eor x5,x5,x0,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x25,ror#39 // Sigma0(a) eor x4,x4,x13,lsr#6 // sigma1(X[i+14]) add x15,x15,x8 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x15,x15,x5 add x24,x24,x17 // h+=Sigma0(a) add x15,x15,x4 ldr x4,[sp,#8] str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x6,x1,#1 and x17,x21,x20 ror x5,x14,#19 bic x19,x22,x20 ror x7,x24,#28 add x23,x23,x15 // h+=X[i] eor x16,x16,x20,ror#18 eor x6,x6,x1,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x7,x7,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x5,x5,x14,ror#61 eor x6,x6,x1,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x24,ror#39 // Sigma0(a) eor x5,x5,x14,lsr#6 // sigma1(X[i+14]) add x0,x0,x9 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x0,x0,x6 add x23,x23,x17 
// h+=Sigma0(a) add x0,x0,x5 ldr x5,[sp,#16] str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x7,x2,#1 and x17,x20,x27 ror x6,x15,#19 bic x28,x21,x27 ror x8,x23,#28 add x22,x22,x0 // h+=X[i] eor x16,x16,x27,ror#18 eor x7,x7,x2,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x8,x8,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x6,x6,x15,ror#61 eor x7,x7,x2,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x23,ror#39 // Sigma0(a) eor x6,x6,x15,lsr#6 // sigma1(X[i+14]) add x1,x1,x10 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x1,x1,x7 add x22,x22,x17 // h+=Sigma0(a) add x1,x1,x6 ldr x6,[sp,#24] str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x8,x3,#1 and x17,x27,x26 ror x7,x0,#19 bic x19,x20,x26 ror x9,x22,#28 add x21,x21,x1 // h+=X[i] eor x16,x16,x26,ror#18 eor x8,x8,x3,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x9,x9,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x7,x7,x0,ror#61 eor x8,x8,x3,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x22,ror#39 // Sigma0(a) eor x7,x7,x0,lsr#6 // sigma1(X[i+14]) add x2,x2,x11 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x2,x2,x8 add x21,x21,x17 // h+=Sigma0(a) add x2,x2,x7 ldr x7,[sp,#0] str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x10,x10,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 cbnz x19,Loop_16_xx ldp x0,x2,[x29,#96] ldr x1,[x29,#112] sub x30,x30,#648 // rewind ldp x3,x4,[x0] ldp x5,x6,[x0,#2*8] add x1,x1,#14*8 // advance input pointer ldp x7,x8,[x0,#4*8] add x20,x20,x3 ldp x9,x10,[x0,#6*8] add x21,x21,x4 add x22,x22,x5 add x23,x23,x6 stp x20,x21,[x0] add x24,x24,x7 add x25,x25,x8 stp x22,x23,[x0,#2*8] add x26,x26,x9 add x27,x27,x10 cmp x1,x2 stp x24,x25,[x0,#4*8] stp x26,x27,[x0,#6*8] b.ne Loop ldp x19,x20,[x29,#16] add sp,sp,#4*8 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .section __TEXT,__const .align 6 LK512: .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 
0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0 // terminator .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 .text #ifndef __KERNEL__ .globl _sha512_block_data_order_hw .private_extern _sha512_block_data_order_hw .align 6 _sha512_block_data_order_hw: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#8] // kFlag_sha512_hw #endif // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ld1 {v16.16b,v17.16b,v18.16b,v19.16b},[x1],#64 // load input ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 ld1 {v0.2d,v1.2d,v2.2d,v3.2d},[x0] // load context adrp x3,LK512@PAGE add x3,x3,LK512@PAGEOFF rev64 v16.16b,v16.16b rev64 v17.16b,v17.16b rev64 v18.16b,v18.16b rev64 v19.16b,v19.16b rev64 v20.16b,v20.16b rev64 v21.16b,v21.16b rev64 v22.16b,v22.16b rev64 v23.16b,v23.16b b Loop_hw .align 4 Loop_hw: ld1 {v24.2d},[x3],#16 subs x2,x2,#1 sub x4,x1,#128 orr v26.16b,v0.16b,v0.16b // offload orr v27.16b,v1.16b,v1.16b orr v28.16b,v2.16b,v2.16b orr v29.16b,v3.16b,v3.16b csel x1,x1,x4,ne // conditional rewind add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 
0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add 
v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 
v4.16b,v3.16b,v2.16b add v25.2d,v25.2d,v23.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v24.2d,v24.2d,v16.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08230 //sha512su0 v16.16b,v17.16b ext v7.16b,v20.16b,v21.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678af0 //sha512su1 v16.16b,v23.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v25.2d,v25.2d,v17.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08251 //sha512su0 v17.16b,v18.16b ext v7.16b,v21.16b,v22.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678a11 //sha512su1 v17.16b,v16.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v24.2d,v24.2d,v18.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec08272 //sha512su0 v18.16b,v19.16b ext v7.16b,v22.16b,v23.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678a32 //sha512su1 v18.16b,v17.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b add v25.2d,v25.2d,v19.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08293 //sha512su0 v19.16b,v20.16b ext v7.16b,v23.16b,v16.16b,#8 .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b .long 0xce678a53 //sha512su1 v19.16b,v18.16b,v7.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b add v24.2d,v24.2d,v20.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082b4 //sha512su0 v20.16b,v21.16b ext v7.16b,v16.16b,v17.16b,#8 .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b .long 0xce678a74 //sha512su1 v20.16b,v19.16b,v7.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b add v25.2d,v25.2d,v21.2d ld1 {v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec082d5 //sha512su0 v21.16b,v22.16b ext v7.16b,v17.16b,v18.16b,#8 .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b .long 0xce678a95 //sha512su1 v21.16b,v20.16b,v7.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v24.2d,v24.2d,v22.2d ld1 {v25.2d},[x3],#16 ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v24.2d // "T1 + H + K512[i]" .long 0xcec082f6 //sha512su0 v22.16b,v23.16b ext v7.16b,v18.16b,v19.16b,#8 .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b .long 0xce678ab6 //sha512su1 v22.16b,v21.16b,v7.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b add v25.2d,v25.2d,v23.2d ld1 
{v24.2d},[x3],#16 ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v25.2d // "T1 + H + K512[i]" .long 0xcec08217 //sha512su0 v23.16b,v16.16b ext v7.16b,v19.16b,v20.16b,#8 .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b .long 0xce678ad7 //sha512su1 v23.16b,v22.16b,v7.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v16.2d ld1 {v16.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v24.2d // "T1 + H + K512[i]" .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b rev64 v16.16b,v16.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v17.2d ld1 {v17.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v25.2d // "T1 + H + K512[i]" .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b rev64 v17.16b,v17.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v18.2d ld1 {v18.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v24.2d // "T1 + H + K512[i]" .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b rev64 v18.16b,v18.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v19.2d ld1 {v19.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v2.16b,v3.16b,#8 ext v6.16b,v1.16b,v2.16b,#8 add v3.2d,v3.2d,v25.2d // "T1 + H + K512[i]" .long 0xce6680a3 //sha512h v3.16b,v5.16b,v6.16b rev64 v19.16b,v19.16b add v4.2d,v1.2d,v3.2d // "D + T1" .long 0xce608423 //sha512h2 v3.16b,v1.16b,v0.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v20.2d ld1 {v20.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v4.16b,v2.16b,#8 ext v6.16b,v0.16b,v4.16b,#8 add v2.2d,v2.2d,v24.2d // "T1 + H + K512[i]" .long 0xce6680a2 //sha512h v2.16b,v5.16b,v6.16b rev64 v20.16b,v20.16b add v1.2d,v0.2d,v2.2d // "D + T1" .long 0xce638402 //sha512h2 v2.16b,v0.16b,v3.16b ld1 {v24.2d},[x3],#16 add v25.2d,v25.2d,v21.2d ld1 {v21.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v1.16b,v4.16b,#8 ext v6.16b,v3.16b,v1.16b,#8 add v4.2d,v4.2d,v25.2d // "T1 + H + K512[i]" .long 0xce6680a4 //sha512h v4.16b,v5.16b,v6.16b rev64 v21.16b,v21.16b add v0.2d,v3.2d,v4.2d // "D + T1" .long 0xce628464 //sha512h2 v4.16b,v3.16b,v2.16b ld1 {v25.2d},[x3],#16 add v24.2d,v24.2d,v22.2d ld1 {v22.16b},[x1],#16 // load next input ext v24.16b,v24.16b,v24.16b,#8 ext v5.16b,v0.16b,v1.16b,#8 ext v6.16b,v2.16b,v0.16b,#8 add v1.2d,v1.2d,v24.2d // "T1 + H + K512[i]" .long 0xce6680a1 //sha512h v1.16b,v5.16b,v6.16b rev64 v22.16b,v22.16b add v3.2d,v2.2d,v1.2d // "D + T1" .long 0xce648441 //sha512h2 v1.16b,v2.16b,v4.16b sub x3,x3,#80*8 // rewind add v25.2d,v25.2d,v23.2d ld1 {v23.16b},[x1],#16 // load next input ext v25.16b,v25.16b,v25.16b,#8 ext v5.16b,v3.16b,v0.16b,#8 ext v6.16b,v4.16b,v3.16b,#8 add v0.2d,v0.2d,v25.2d // "T1 + H + K512[i]" .long 0xce6680a0 //sha512h v0.16b,v5.16b,v6.16b rev64 v23.16b,v23.16b add v2.2d,v4.2d,v0.2d // "D + T1" .long 0xce618480 //sha512h2 v0.16b,v4.16b,v1.16b add v0.2d,v0.2d,v26.2d // accumulate add v1.2d,v1.2d,v27.2d add v2.2d,v2.2d,v28.2d add v3.2d,v3.2d,v29.2d cbnz x2,Loop_hw st1 
{v0.2d,v1.2d,v2.2d,v3.2d},[x0] // store context ldr x29,[sp],#16 ret #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
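// (Annotation, not part of the generated output: the routine above is the Armv8
// SHA-512 crypto-extension path. Each 128-byte block is expanded with
// sha512su0/sha512su1 and compressed with sha512h/sha512h2, all emitted as .long
// encodings so the file still assembles where the assembler lacks those
// mnemonics. x2 counts 128-byte blocks, and the csel "conditional rewind" makes
// the final iteration's "load next input" reads re-read the last block instead
// of running past the end of the input.)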
marvin-hansen/iggy-streaming-system
17,229
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/ghashv8-armx.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__>=7 .text .globl _gcm_init_v8 .private_extern _gcm_init_v8 .align 4 _gcm_init_v8: AARCH64_VALID_CALL_TARGET ld1 {v17.2d},[x1] //load input H movi v19.16b,#0xe1 shl v19.2d,v19.2d,#57 //0xc2.0 ext v3.16b,v17.16b,v17.16b,#8 ushr v18.2d,v19.2d,#63 dup v17.4s,v17.s[1] ext v16.16b,v18.16b,v19.16b,#8 //t0=0xc2....01 ushr v18.2d,v3.2d,#63 sshr v17.4s,v17.4s,#31 //broadcast carry bit and v18.16b,v18.16b,v16.16b shl v3.2d,v3.2d,#1 ext v18.16b,v18.16b,v18.16b,#8 and v16.16b,v16.16b,v17.16b orr v3.16b,v3.16b,v18.16b //H<<<=1 eor v20.16b,v3.16b,v16.16b //twisted H ext v20.16b, v20.16b, v20.16b, #8 st1 {v20.2d},[x0],#16 //store Htable[0] //calculate H^2 ext v16.16b,v20.16b,v20.16b,#8 //Karatsuba pre-processing pmull2 v0.1q,v20.2d,v20.2d eor v16.16b,v16.16b,v20.16b pmull v2.1q,v20.1d,v20.1d pmull v1.1q,v16.1d,v16.1d ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v17.16b,v0.16b,v18.16b ext v22.16b,v17.16b,v17.16b,#8 //Karatsuba pre-processing eor v17.16b,v17.16b,v22.16b ext v21.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v21.2d},[x0],#16 //store Htable[1..2] st1 {v22.2d},[x0],#16 //store Htable[1..2] //calculate H^3 and H^4 pmull2 v0.1q,v20.2d, v22.2d pmull2 v5.1q,v22.2d,v22.2d pmull v2.1q,v20.1d, v22.1d pmull v7.1q,v22.1d,v22.1d pmull v1.1q,v16.1d,v17.1d pmull v6.1q,v17.1d,v17.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase ext v4.16b,v5.16b,v5.16b,#8 pmull v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b, v0.16b,v18.16b //H^3 eor v17.16b, v5.16b,v4.16b //H^4 ext v23.16b,v16.16b,v16.16b,#8 //Karatsuba pre-processing ext v25.16b,v17.16b,v17.16b,#8 ext v18.16b,v22.16b,v22.16b,#8 eor v16.16b,v16.16b,v23.16b eor v17.16b,v17.16b,v25.16b eor v18.16b,v18.16b,v22.16b ext v24.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v23.2d,v24.2d,v25.2d},[x0],#48 //store Htable[3..5] //calculate H^5 and H^6 pmull2 v0.1q,v22.2d, v23.2d pmull2 v5.1q,v23.2d,v23.2d pmull v2.1q,v22.1d, v23.1d pmull v7.1q,v23.1d,v23.1d pmull v1.1q,v16.1d,v18.1d pmull v6.1q,v16.1d,v16.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase ext v4.16b,v5.16b,v5.16b,#8 pmull 
v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b,v0.16b,v18.16b //H^5 eor v17.16b,v5.16b,v4.16b //H^6 ext v26.16b, v16.16b, v16.16b,#8 //Karatsuba pre-processing ext v28.16b, v17.16b, v17.16b,#8 ext v18.16b,v22.16b,v22.16b,#8 eor v16.16b,v16.16b,v26.16b eor v17.16b,v17.16b,v28.16b eor v18.16b,v18.16b,v22.16b ext v27.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v26.2d,v27.2d,v28.2d},[x0],#48 //store Htable[6..8] //calculate H^7 and H^8 pmull2 v0.1q,v22.2d,v26.2d pmull2 v5.1q,v22.2d,v28.2d pmull v2.1q,v22.1d,v26.1d pmull v7.1q,v22.1d,v28.1d pmull v1.1q,v16.1d,v18.1d pmull v6.1q,v17.1d,v18.1d ext v16.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing ext v17.16b,v5.16b,v7.16b,#8 eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v16.16b eor v4.16b,v5.16b,v7.16b eor v6.16b,v6.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase eor v6.16b,v6.16b,v4.16b pmull v4.1q,v5.1d,v19.1d ins v2.d[0],v1.d[1] ins v7.d[0],v6.d[1] ins v1.d[1],v0.d[0] ins v6.d[1],v5.d[0] eor v0.16b,v1.16b,v18.16b eor v5.16b,v6.16b,v4.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase ext v4.16b,v5.16b,v5.16b,#8 pmull v0.1q,v0.1d,v19.1d pmull v5.1q,v5.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v4.16b,v4.16b,v7.16b eor v16.16b,v0.16b,v18.16b //H^7 eor v17.16b,v5.16b,v4.16b //H^8 ext v29.16b,v16.16b,v16.16b,#8 //Karatsuba pre-processing ext v31.16b,v17.16b,v17.16b,#8 eor v16.16b,v16.16b,v29.16b eor v17.16b,v17.16b,v31.16b ext v30.16b,v16.16b,v17.16b,#8 //pack Karatsuba pre-processed st1 {v29.2d,v30.2d,v31.2d},[x0] //store Htable[9..11] ret .globl _gcm_gmult_v8 .private_extern _gcm_gmult_v8 .align 4 _gcm_gmult_v8: AARCH64_VALID_CALL_TARGET ld1 {v17.2d},[x0] //load Xi movi v19.16b,#0xe1 ld1 {v20.2d,v21.2d},[x1] //load twisted H, ... ext v20.16b,v20.16b,v20.16b,#8 shl v19.2d,v19.2d,#57 #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ext v3.16b,v17.16b,v17.16b,#8 pmull v0.1q,v20.1d,v3.1d //H.lo·Xi.lo eor v17.16b,v17.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v20.2d,v3.2d //H.hi·Xi.hi pmull v1.1q,v21.1d,v17.1d //(H.lo+H.hi)·(Xi.lo+Xi.hi) ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b #endif ext v0.16b,v0.16b,v0.16b,#8 st1 {v0.2d},[x0] //write out Xi ret .globl _gcm_ghash_v8 .private_extern _gcm_ghash_v8 .align 4 _gcm_ghash_v8: AARCH64_VALID_CALL_TARGET cmp x3,#64 b.hs Lgcm_ghash_v8_4x ld1 {v0.2d},[x0] //load [rotated] Xi //"[rotated]" means that //loaded value would have //to be rotated in order to //make it appear as in //algorithm specification subs x3,x3,#32 //see if x3 is 32 or larger mov x12,#16 //x12 is used as post- //increment for input pointer; //as loop is modulo-scheduled //x12 is zeroed just in time //to preclude overstepping //inp[len], which means that //last block[s] are actually //loaded twice, but last //copy is not processed ld1 {v20.2d,v21.2d},[x1],#32 //load twisted H, ..., H^2 ext v20.16b,v20.16b,v20.16b,#8 movi v19.16b,#0xe1 ld1 {v22.2d},[x1] ext v22.16b,v22.16b,v22.16b,#8 csel x12,xzr,x12,eq //is it time to zero x12? 
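// (Annotation: gcm_init_v8 above fills Htable with the "twisted" H and its
// powers up to H^8, with the Karatsuba middle-term halves packed between them.
// gcm_ghash_v8 only needs the first few entries: inputs of 64 bytes or more
// branch to Lgcm_ghash_v8_4x further down, which folds four blocks per
// iteration against H^4..H, while shorter inputs use the two-block loop below,
// which computes
//     Xi <- ((Xi ^ I[i])*H^2 ^ I[i+1]*H) mod P(x),  P(x) = x^128+x^7+x^2+x+1,
// so one reduction by P(x) is amortised over every 32 bytes of input.)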
ext v0.16b,v0.16b,v0.16b,#8 //rotate Xi ld1 {v16.2d},[x2],#16 //load [rotated] I[0] shl v19.2d,v19.2d,#57 //compose 0xc2.0 constant #ifndef __AARCH64EB__ rev64 v16.16b,v16.16b rev64 v0.16b,v0.16b #endif ext v3.16b,v16.16b,v16.16b,#8 //rotate I[0] b.lo Lodd_tail_v8 //x3 was less than 32 ld1 {v17.2d},[x2],x12 //load [rotated] I[1] #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ext v7.16b,v17.16b,v17.16b,#8 eor v3.16b,v3.16b,v0.16b //I[i]^=Xi pmull v4.1q,v20.1d,v7.1d //H·Ii+1 eor v17.16b,v17.16b,v7.16b //Karatsuba pre-processing pmull2 v6.1q,v20.2d,v7.2d b Loop_mod2x_v8 .align 4 Loop_mod2x_v8: ext v18.16b,v3.16b,v3.16b,#8 subs x3,x3,#32 //is there more data? pmull v0.1q,v22.1d,v3.1d //H^2.lo·Xi.lo csel x12,xzr,x12,lo //is it time to zero x12? pmull v5.1q,v21.1d,v17.1d eor v18.16b,v18.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v22.2d,v3.2d //H^2.hi·Xi.hi eor v0.16b,v0.16b,v4.16b //accumulate pmull2 v1.1q,v21.2d,v18.2d //(H^2.lo+H^2.hi)·(Xi.lo+Xi.hi) ld1 {v16.2d},[x2],x12 //load [rotated] I[i+2] eor v2.16b,v2.16b,v6.16b csel x12,xzr,x12,eq //is it time to zero x12? eor v1.16b,v1.16b,v5.16b ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v17.2d},[x2],x12 //load [rotated] I[i+3] #ifndef __AARCH64EB__ rev64 v16.16b,v16.16b #endif eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction #ifndef __AARCH64EB__ rev64 v17.16b,v17.16b #endif ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v7.16b,v17.16b,v17.16b,#8 ext v3.16b,v16.16b,v16.16b,#8 eor v0.16b,v1.16b,v18.16b pmull v4.1q,v20.1d,v7.1d //H·Ii+1 eor v3.16b,v3.16b,v2.16b //accumulate v3.16b early ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v3.16b,v3.16b,v18.16b eor v17.16b,v17.16b,v7.16b //Karatsuba pre-processing eor v3.16b,v3.16b,v0.16b pmull2 v6.1q,v20.2d,v7.2d b.hs Loop_mod2x_v8 //there was at least 32 more bytes eor v2.16b,v2.16b,v18.16b ext v3.16b,v16.16b,v16.16b,#8 //re-construct v3.16b adds x3,x3,#32 //re-construct x3 eor v0.16b,v0.16b,v2.16b //re-construct v0.16b b.eq Ldone_v8 //is x3 zero? 
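// (Annotation: Lodd_tail_v8 below absorbs a final lone 16-byte block with a
// plain Karatsuba multiply by H and the same two-phase reduction, then falls
// into Ldone_v8 to byte-swap and store Xi.)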
Lodd_tail_v8: ext v18.16b,v0.16b,v0.16b,#8 eor v3.16b,v3.16b,v0.16b //inp^=Xi eor v17.16b,v16.16b,v18.16b //v17.16b is rotated inp^Xi pmull v0.1q,v20.1d,v3.1d //H.lo·Xi.lo eor v17.16b,v17.16b,v3.16b //Karatsuba pre-processing pmull2 v2.1q,v20.2d,v3.2d //H.hi·Xi.hi pmull v1.1q,v21.1d,v17.1d //(H.lo+H.hi)·(Xi.lo+Xi.hi) ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b Ldone_v8: #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b #endif ext v0.16b,v0.16b,v0.16b,#8 st1 {v0.2d},[x0] //write out Xi ret .align 4 gcm_ghash_v8_4x: Lgcm_ghash_v8_4x: ld1 {v0.2d},[x0] //load [rotated] Xi ld1 {v20.2d,v21.2d,v22.2d},[x1],#48 //load twisted H, ..., H^2 ext v20.16b,v20.16b,v20.16b,#8 ext v22.16b,v22.16b,v22.16b,#8 movi v19.16b,#0xe1 ld1 {v26.2d,v27.2d,v28.2d},[x1] //load twisted H^3, ..., H^4 ext v26.16b,v26.16b,v26.16b,#8 ext v28.16b,v28.16b,v28.16b,#8 shl v19.2d,v19.2d,#57 //compose 0xc2.0 constant ld1 {v4.2d,v5.2d,v6.2d,v7.2d},[x2],#64 #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v7.16b,v7.16b rev64 v4.16b,v4.16b #endif ext v25.16b,v7.16b,v7.16b,#8 ext v24.16b,v6.16b,v6.16b,#8 ext v23.16b,v5.16b,v5.16b,#8 pmull v29.1q,v20.1d,v25.1d //H·Ii+3 eor v7.16b,v7.16b,v25.16b pmull2 v31.1q,v20.2d,v25.2d pmull v30.1q,v21.1d,v7.1d pmull v16.1q,v22.1d,v24.1d //H^2·Ii+2 eor v6.16b,v6.16b,v24.16b pmull2 v24.1q,v22.2d,v24.2d pmull2 v6.1q,v21.2d,v6.2d eor v29.16b,v29.16b,v16.16b eor v31.16b,v31.16b,v24.16b eor v30.16b,v30.16b,v6.16b pmull v7.1q,v26.1d,v23.1d //H^3·Ii+1 eor v5.16b,v5.16b,v23.16b pmull2 v23.1q,v26.2d,v23.2d pmull v5.1q,v27.1d,v5.1d eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b eor v30.16b,v30.16b,v5.16b subs x3,x3,#128 b.lo Ltail4x b Loop4x .align 4 Loop4x: eor v16.16b,v4.16b,v0.16b ld1 {v4.2d,v5.2d,v6.2d,v7.2d},[x2],#64 ext v3.16b,v16.16b,v16.16b,#8 #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v7.16b,v7.16b rev64 v4.16b,v4.16b #endif pmull v0.1q,v28.1d,v3.1d //H^4·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v28.2d,v3.2d ext v25.16b,v7.16b,v7.16b,#8 pmull2 v1.1q,v27.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b ext v24.16b,v6.16b,v6.16b,#8 eor v1.16b,v1.16b,v30.16b ext v23.16b,v5.16b,v5.16b,#8 ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b pmull v29.1q,v20.1d,v25.1d //H·Ii+3 eor v7.16b,v7.16b,v25.16b eor v1.16b,v1.16b,v17.16b pmull2 v31.1q,v20.2d,v25.2d eor v1.16b,v1.16b,v18.16b pmull v30.1q,v21.1d,v7.1d pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] pmull v16.1q,v22.1d,v24.1d //H^2·Ii+2 eor v6.16b,v6.16b,v24.16b pmull2 v24.1q,v22.2d,v24.2d eor v0.16b,v1.16b,v18.16b pmull2 v6.1q,v21.2d,v6.2d eor v29.16b,v29.16b,v16.16b eor v31.16b,v31.16b,v24.16b eor v30.16b,v30.16b,v6.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d pmull v7.1q,v26.1d,v23.1d //H^3·Ii+1 eor v5.16b,v5.16b,v23.16b eor v18.16b,v18.16b,v2.16b pmull2 v23.1q,v26.2d,v23.2d pmull v5.1q,v27.1d,v5.1d eor v0.16b,v0.16b,v18.16b eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b ext v0.16b,v0.16b,v0.16b,#8 eor v30.16b,v30.16b,v5.16b subs x3,x3,#64 b.hs Loop4x Ltail4x: eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 
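// (Annotation: Ltail4x completes the four blocks whose partial products are
// already in flight from the loop above, then dispatches to Lthree/Ltwo/Lone
// for a remaining 48/32/16-byte tail before the shared reduction and store at
// Ldone4x.)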
pmull v0.1q,v28.1d,v3.1d //H^4·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v28.2d,v3.2d pmull2 v1.1q,v27.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b adds x3,x3,#64 b.eq Ldone4x cmp x3,#32 b.lo Lone b.eq Ltwo Lthree: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d,v5.2d,v6.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v6.16b,v6.16b rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v24.16b,v6.16b,v6.16b,#8 ext v23.16b,v5.16b,v5.16b,#8 eor v0.16b,v1.16b,v18.16b pmull v29.1q,v20.1d,v24.1d //H·Ii+2 eor v6.16b,v6.16b,v24.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b pmull2 v31.1q,v20.2d,v24.2d pmull v30.1q,v21.1d,v6.1d eor v0.16b,v0.16b,v18.16b pmull v7.1q,v22.1d,v23.1d //H^2·Ii+1 eor v5.16b,v5.16b,v23.16b ext v0.16b,v0.16b,v0.16b,#8 pmull2 v23.1q,v22.2d,v23.2d eor v16.16b,v4.16b,v0.16b pmull2 v5.1q,v21.2d,v5.2d ext v3.16b,v16.16b,v16.16b,#8 eor v29.16b,v29.16b,v7.16b eor v31.16b,v31.16b,v23.16b eor v30.16b,v30.16b,v5.16b pmull v0.1q,v26.1d,v3.1d //H^3·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v26.2d,v3.2d pmull v1.1q,v27.1d,v16.1d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b b Ldone4x .align 4 Ltwo: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d,v5.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v5.16b,v5.16b rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] ext v23.16b,v5.16b,v5.16b,#8 eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 pmull v29.1q,v20.1d,v23.1d //H·Ii+1 eor v5.16b,v5.16b,v23.16b eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 pmull2 v31.1q,v20.2d,v23.2d pmull v30.1q,v21.1d,v5.1d pmull v0.1q,v22.1d,v3.1d //H^2·(Xi+Ii) eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v22.2d,v3.2d pmull2 v1.1q,v21.2d,v16.2d eor v0.16b,v0.16b,v29.16b eor v2.16b,v2.16b,v31.16b eor v1.16b,v1.16b,v30.16b b Ldone4x .align 4 Lone: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b ld1 {v4.2d},[x2] eor v1.16b,v1.16b,v18.16b #ifndef __AARCH64EB__ rev64 v4.16b,v4.16b #endif pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 eor v16.16b,v4.16b,v0.16b ext v3.16b,v16.16b,v16.16b,#8 pmull v0.1q,v20.1d,v3.1d eor v16.16b,v16.16b,v3.16b pmull2 v2.1q,v20.2d,v3.2d pmull v1.1q,v21.1d,v16.1d Ldone4x: ext v17.16b,v0.16b,v2.16b,#8 //Karatsuba post-processing eor v18.16b,v0.16b,v2.16b eor v1.16b,v1.16b,v17.16b eor v1.16b,v1.16b,v18.16b pmull v18.1q,v0.1d,v19.1d //1st phase of reduction ins v2.d[0],v1.d[1] ins v1.d[1],v0.d[0] eor v0.16b,v1.16b,v18.16b ext v18.16b,v0.16b,v0.16b,#8 //2nd phase of reduction pmull v0.1q,v0.1d,v19.1d eor v18.16b,v18.16b,v2.16b eor v0.16b,v0.16b,v18.16b ext v0.16b,v0.16b,v0.16b,#8 #ifndef __AARCH64EB__ rev64 v0.16b,v0.16b #endif st1 {v0.2d},[x0] //write out Xi ret .byte 
71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
42,176
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/vpaes-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .section __TEXT,__const .align 7 // totally strategic alignment _vpaes_consts: Lk_mc_forward: // mc_forward .quad 0x0407060500030201, 0x0C0F0E0D080B0A09 .quad 0x080B0A0904070605, 0x000302010C0F0E0D .quad 0x0C0F0E0D080B0A09, 0x0407060500030201 .quad 0x000302010C0F0E0D, 0x080B0A0904070605 Lk_mc_backward: // mc_backward .quad 0x0605040702010003, 0x0E0D0C0F0A09080B .quad 0x020100030E0D0C0F, 0x0A09080B06050407 .quad 0x0E0D0C0F0A09080B, 0x0605040702010003 .quad 0x0A09080B06050407, 0x020100030E0D0C0F Lk_sr: // sr .quad 0x0706050403020100, 0x0F0E0D0C0B0A0908 .quad 0x030E09040F0A0500, 0x0B06010C07020D08 .quad 0x0F060D040B020900, 0x070E050C030A0108 .quad 0x0B0E0104070A0D00, 0x0306090C0F020508 // // "Hot" constants // Lk_inv: // inv, inva .quad 0x0E05060F0D080180, 0x040703090A0B0C02 .quad 0x01040A060F0B0780, 0x030D0E0C02050809 Lk_ipt: // input transform (lo, hi) .quad 0xC2B2E8985A2A7000, 0xCABAE09052227808 .quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81 Lk_sbo: // sbou, sbot .quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878 .quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA Lk_sb1: // sb1u, sb1t .quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF .quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544 Lk_sb2: // sb2u, sb2t .quad 0x69EB88400AE12900, 0xC2A163C8AB82234A .quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD // // Decryption stuff // Lk_dipt: // decryption input transform .quad 0x0F505B040B545F00, 0x154A411E114E451A .quad 0x86E383E660056500, 0x12771772F491F194 Lk_dsbo: // decryption sbox final output .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C Lk_dsb9: // decryption sbox output *9*u, *9*t .quad 0x851C03539A86D600, 0xCAD51F504F994CC9 .quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565 Lk_dsbd: // decryption sbox output *D*u, *D*t .quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439 .quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3 Lk_dsbb: // decryption sbox output *B*u, *B*t .quad 0xD022649296B44200, 0x602646F6B0F2D404 .quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B Lk_dsbe: // decryption sbox output *E*u, *E*t .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 // // Key schedule constants // Lk_dksd: // decryption key schedule: invskew x*D .quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9 .quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E Lk_dksb: // decryption key schedule: invskew x*B .quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99 .quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8 Lk_dkse: // decryption key schedule: invskew x*E + 0x63 .quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086 .quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487 Lk_dks9: // decryption key schedule: invskew x*9 .quad 0xB6116FC87ED9A700, 0x4AED933482255BFC .quad 0x4576516227143300, 0x8BB89FACE9DAFDCE Lk_rcon: // rcon .quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81 Lk_opt: // output transform .quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808 .quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0 Lk_deskew: // deskew tables: inverts the sbox's "skew" .quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A .quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,65,82,77,118,56,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 .align 2 
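// (Annotation: everything below is Mike Hamburg's "vector permutation AES", as
// the credits string above says. Instead of byte-wide S-box lookups, each state
// byte is split into 4-bit nibbles and pushed through the 16-entry Lk_* tables
// with tbl, so no data-dependent memory addresses are used. The tables provide
// the input/output transforms, the split S-box halves (inv/sb1/sb2 and their
// decryption counterparts) and the key-schedule constants that the routines
// below pull in through their *_preheat helpers.)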
.align 6 .text ## ## _aes_preheat ## ## Fills register %r10 -> .aes_consts (so you can -fPIC) ## and %xmm9-%xmm15 as specified below. ## .align 4 _vpaes_encrypt_preheat: adrp x10, Lk_inv@PAGE add x10, x10, Lk_inv@PAGEOFF movi v17.16b, #0x0f ld1 {v18.2d,v19.2d}, [x10],#32 // Lk_inv ld1 {v20.2d,v21.2d,v22.2d,v23.2d}, [x10],#64 // Lk_ipt, Lk_sbo ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x10] // Lk_sb1, Lk_sb2 ret ## ## _aes_encrypt_core ## ## AES-encrypt %xmm0. ## ## Inputs: ## %xmm0 = input ## %xmm9-%xmm15 as in _vpaes_preheat ## (%rdx) = scheduled keys ## ## Output in %xmm0 ## Clobbers %xmm1-%xmm5, %r9, %r10, %r11, %rax ## Preserves %xmm6 - %xmm8 so you get some local vectors ## ## .align 4 _vpaes_encrypt_core: mov x9, x2 ldr w8, [x2,#240] // pull rounds adrp x11, Lk_mc_forward@PAGE+16 add x11, x11, Lk_mc_forward@PAGEOFF+16 // vmovdqa .Lk_ipt(%rip), %xmm2 # iptlo ld1 {v16.2d}, [x9], #16 // vmovdqu (%r9), %xmm5 # round0 key and v1.16b, v7.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v7.16b, #4 // vpsrlb $4, %xmm0, %xmm0 tbl v1.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm1 // vmovdqa .Lk_ipt+16(%rip), %xmm3 # ipthi tbl v2.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm3, %xmm2 eor v0.16b, v1.16b, v16.16b // vpxor %xmm5, %xmm1, %xmm0 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 b Lenc_entry .align 4 Lenc_loop: // middle of middle round add x10, x11, #0x40 tbl v4.16b, {v25.16b}, v2.16b // vpshufb %xmm2, %xmm13, %xmm4 # 4 = sb1u ld1 {v1.2d}, [x11], #16 // vmovdqa -0x40(%r11,%r10), %xmm1 # Lk_mc_forward[] tbl v0.16b, {v24.16b}, v3.16b // vpshufb %xmm3, %xmm12, %xmm0 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k tbl v5.16b, {v27.16b}, v2.16b // vpshufb %xmm2, %xmm15, %xmm5 # 4 = sb2u eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A tbl v2.16b, {v26.16b}, v3.16b // vpshufb %xmm3, %xmm14, %xmm2 # 2 = sb2t ld1 {v4.2d}, [x10] // vmovdqa (%r11,%r10), %xmm4 # Lk_mc_backward[] tbl v3.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm3 # 0 = B eor v2.16b, v2.16b, v5.16b // vpxor %xmm5, %xmm2, %xmm2 # 2 = 2A tbl v0.16b, {v0.16b}, v4.16b // vpshufb %xmm4, %xmm0, %xmm0 # 3 = D eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 0 = 2A+B tbl v4.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm4 # 0 = 2B+C eor v0.16b, v0.16b, v3.16b // vpxor %xmm3, %xmm0, %xmm0 # 3 = 2A+B+D and x11, x11, #~(1<<6) // and $0x30, %r11 # ... 
mod 4 eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = 2A+3B+C+D sub w8, w8, #1 // nr-- Lenc_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm0, %xmm9, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i tbl v5.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm5 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v3.16b, v3.16b, v5.16b // vpxor %xmm5, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v4.16b, v4.16b, v5.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v3.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm5 cbnz w8, Lenc_loop // middle of last round add x10, x11, #0x80 // vmovdqa -0x60(%r10), %xmm4 # 3 : sbou .Lk_sbo // vmovdqa -0x50(%r10), %xmm0 # 0 : sbot .Lk_sbo+16 tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou ld1 {v1.2d}, [x10] // vmovdqa 0x40(%r11,%r10), %xmm1 # Lk_sr[] tbl v0.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm0, %xmm0 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A tbl v0.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm0 ret .globl _vpaes_encrypt .private_extern _vpaes_encrypt .align 4 _vpaes_encrypt: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#4] // kFlag_vpaes_encrypt #endif AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! 
add x29,sp,#0 ld1 {v7.16b}, [x0] bl _vpaes_encrypt_preheat bl _vpaes_encrypt_core st1 {v0.16b}, [x1] ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .align 4 _vpaes_encrypt_2x: mov x9, x2 ldr w8, [x2,#240] // pull rounds adrp x11, Lk_mc_forward@PAGE+16 add x11, x11, Lk_mc_forward@PAGEOFF+16 // vmovdqa .Lk_ipt(%rip), %xmm2 # iptlo ld1 {v16.2d}, [x9], #16 // vmovdqu (%r9), %xmm5 # round0 key and v1.16b, v14.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v14.16b, #4 // vpsrlb $4, %xmm0, %xmm0 and v9.16b, v15.16b, v17.16b ushr v8.16b, v15.16b, #4 tbl v1.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm1 tbl v9.16b, {v20.16b}, v9.16b // vmovdqa .Lk_ipt+16(%rip), %xmm3 # ipthi tbl v2.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm3, %xmm2 tbl v10.16b, {v21.16b}, v8.16b eor v0.16b, v1.16b, v16.16b // vpxor %xmm5, %xmm1, %xmm0 eor v8.16b, v9.16b, v16.16b eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 eor v8.16b, v8.16b, v10.16b b Lenc_2x_entry .align 4 Lenc_2x_loop: // middle of middle round add x10, x11, #0x40 tbl v4.16b, {v25.16b}, v2.16b // vpshufb %xmm2, %xmm13, %xmm4 # 4 = sb1u tbl v12.16b, {v25.16b}, v10.16b ld1 {v1.2d}, [x11], #16 // vmovdqa -0x40(%r11,%r10), %xmm1 # Lk_mc_forward[] tbl v0.16b, {v24.16b}, v3.16b // vpshufb %xmm3, %xmm12, %xmm0 # 0 = sb1t tbl v8.16b, {v24.16b}, v11.16b eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b tbl v5.16b, {v27.16b}, v2.16b // vpshufb %xmm2, %xmm15, %xmm5 # 4 = sb2u tbl v13.16b, {v27.16b}, v10.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A eor v8.16b, v8.16b, v12.16b tbl v2.16b, {v26.16b}, v3.16b // vpshufb %xmm3, %xmm14, %xmm2 # 2 = sb2t tbl v10.16b, {v26.16b}, v11.16b ld1 {v4.2d}, [x10] // vmovdqa (%r11,%r10), %xmm4 # Lk_mc_backward[] tbl v3.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm3 # 0 = B tbl v11.16b, {v8.16b}, v1.16b eor v2.16b, v2.16b, v5.16b // vpxor %xmm5, %xmm2, %xmm2 # 2 = 2A eor v10.16b, v10.16b, v13.16b tbl v0.16b, {v0.16b}, v4.16b // vpshufb %xmm4, %xmm0, %xmm0 # 3 = D tbl v8.16b, {v8.16b}, v4.16b eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 0 = 2A+B eor v11.16b, v11.16b, v10.16b tbl v4.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm4 # 0 = 2B+C tbl v12.16b, {v11.16b},v1.16b eor v0.16b, v0.16b, v3.16b // vpxor %xmm3, %xmm0, %xmm0 # 3 = 2A+B+D eor v8.16b, v8.16b, v11.16b and x11, x11, #~(1<<6) // and $0x30, %r11 # ... 
mod 4 eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = 2A+3B+C+D eor v8.16b, v8.16b, v12.16b sub w8, w8, #1 // nr-- Lenc_2x_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm0, %xmm9, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i and v9.16b, v8.16b, v17.16b ushr v8.16b, v8.16b, #4 tbl v5.16b, {v19.16b},v1.16b // vpshufb %xmm1, %xmm11, %xmm5 # 2 = a/k tbl v13.16b, {v19.16b},v9.16b eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j eor v9.16b, v9.16b, v8.16b tbl v3.16b, {v18.16b},v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v11.16b, {v18.16b},v8.16b tbl v4.16b, {v18.16b},v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j tbl v12.16b, {v18.16b},v9.16b eor v3.16b, v3.16b, v5.16b // vpxor %xmm5, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v11.16b, v11.16b, v13.16b eor v4.16b, v4.16b, v5.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = jak = 1/j + a/k eor v12.16b, v12.16b, v13.16b tbl v2.16b, {v18.16b},v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v10.16b, {v18.16b},v11.16b tbl v3.16b, {v18.16b},v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak tbl v11.16b, {v18.16b},v12.16b eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v10.16b, v10.16b, v9.16b eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo eor v11.16b, v11.16b, v8.16b ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm5 cbnz w8, Lenc_2x_loop // middle of last round add x10, x11, #0x80 // vmovdqa -0x60(%r10), %xmm4 # 3 : sbou .Lk_sbo // vmovdqa -0x50(%r10), %xmm0 # 0 : sbot .Lk_sbo+16 tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou tbl v12.16b, {v22.16b}, v10.16b ld1 {v1.2d}, [x10] // vmovdqa 0x40(%r11,%r10), %xmm1 # Lk_sr[] tbl v0.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm0, %xmm0 # 0 = sb1t tbl v8.16b, {v23.16b}, v11.16b eor v4.16b, v4.16b, v16.16b // vpxor %xmm5, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 0 = A eor v8.16b, v8.16b, v12.16b tbl v0.16b, {v0.16b},v1.16b // vpshufb %xmm1, %xmm0, %xmm0 tbl v1.16b, {v8.16b},v1.16b ret .align 4 _vpaes_decrypt_preheat: adrp x10, Lk_inv@PAGE add x10, x10, Lk_inv@PAGEOFF movi v17.16b, #0x0f adrp x11, Lk_dipt@PAGE add x11, x11, Lk_dipt@PAGEOFF ld1 {v18.2d,v19.2d}, [x10],#32 // Lk_inv ld1 {v20.2d,v21.2d,v22.2d,v23.2d}, [x11],#64 // Lk_dipt, Lk_dsbo ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x11],#64 // Lk_dsb9, Lk_dsbd ld1 {v28.2d,v29.2d,v30.2d,v31.2d}, [x11] // Lk_dsbb, Lk_dsbe ret ## ## Decryption core ## ## Same API as encryption core. 
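##  (Annotation: the decryption core mirrors the encryption core above, but
##  walks the inverse tables Lk_dsb9/Lk_dsbd/Lk_dsbb/Lk_dsbe for the combined
##  InvSubBytes/InvMixColumns step and rotates its Lk_mc_forward constant with
##  "ext ...,#12" once per round instead of stepping a table index.)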
## .align 4 _vpaes_decrypt_core: mov x9, x2 ldr w8, [x2,#240] // pull rounds // vmovdqa .Lk_dipt(%rip), %xmm2 # iptlo lsl x11, x8, #4 // mov %rax, %r11; shl $4, %r11 eor x11, x11, #0x30 // xor $0x30, %r11 adrp x10, Lk_sr@PAGE add x10, x10, Lk_sr@PAGEOFF and x11, x11, #0x30 // and $0x30, %r11 add x11, x11, x10 adrp x10, Lk_mc_forward@PAGE+48 add x10, x10, Lk_mc_forward@PAGEOFF+48 ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm4 # round0 key and v1.16b, v7.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v7.16b, #4 // vpsrlb $4, %xmm0, %xmm0 tbl v2.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm2 ld1 {v5.2d}, [x10] // vmovdqa Lk_mc_forward+48(%rip), %xmm5 // vmovdqa .Lk_dipt+16(%rip), %xmm1 # ipthi tbl v0.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm1, %xmm0 eor v2.16b, v2.16b, v16.16b // vpxor %xmm4, %xmm2, %xmm2 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 b Ldec_entry .align 4 Ldec_loop: // // Inverse mix columns // // vmovdqa -0x20(%r10),%xmm4 # 4 : sb9u // vmovdqa -0x10(%r10),%xmm1 # 0 : sb9t tbl v4.16b, {v24.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sb9u tbl v1.16b, {v25.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb9t eor v0.16b, v4.16b, v16.16b // vpxor %xmm4, %xmm0, %xmm0 // vmovdqa 0x00(%r10),%xmm4 # 4 : sbdu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x10(%r10),%xmm1 # 0 : sbdt tbl v4.16b, {v26.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbdu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v27.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbdt eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch // vmovdqa 0x20(%r10), %xmm4 # 4 : sbbu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x30(%r10), %xmm1 # 0 : sbbt tbl v4.16b, {v28.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbbu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v29.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbbt eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch // vmovdqa 0x40(%r10), %xmm4 # 4 : sbeu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x50(%r10), %xmm1 # 0 : sbet tbl v4.16b, {v30.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbeu tbl v0.16b, {v0.16b}, v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v1.16b, {v31.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbet eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch ext v5.16b, v5.16b, v5.16b, #12 // vpalignr $12, %xmm5, %xmm5, %xmm5 eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch sub w8, w8, #1 // sub $1,%rax # nr-- Ldec_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i tbl v2.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v3.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo ld1 {v16.2d}, 
[x9],#16 // vmovdqu (%r9), %xmm0 cbnz w8, Ldec_loop // middle of last round // vmovdqa 0x60(%r10), %xmm4 # 3 : sbou tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou // vmovdqa 0x70(%r10), %xmm1 # 0 : sbot ld1 {v2.2d}, [x11] // vmovdqa -0x160(%r11), %xmm2 # Lk_sr-Lk_dsbd=-0x160 tbl v1.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb1t eor v4.16b, v4.16b, v16.16b // vpxor %xmm0, %xmm4, %xmm4 # 4 = sb1u + k eor v0.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm0 # 0 = A tbl v0.16b, {v0.16b}, v2.16b // vpshufb %xmm2, %xmm0, %xmm0 ret .globl _vpaes_decrypt .private_extern _vpaes_decrypt .align 4 _vpaes_decrypt: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 ld1 {v7.16b}, [x0] bl _vpaes_decrypt_preheat bl _vpaes_decrypt_core st1 {v0.16b}, [x1] ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret // v14-v15 input, v0-v1 output .align 4 _vpaes_decrypt_2x: mov x9, x2 ldr w8, [x2,#240] // pull rounds // vmovdqa .Lk_dipt(%rip), %xmm2 # iptlo lsl x11, x8, #4 // mov %rax, %r11; shl $4, %r11 eor x11, x11, #0x30 // xor $0x30, %r11 adrp x10, Lk_sr@PAGE add x10, x10, Lk_sr@PAGEOFF and x11, x11, #0x30 // and $0x30, %r11 add x11, x11, x10 adrp x10, Lk_mc_forward@PAGE+48 add x10, x10, Lk_mc_forward@PAGEOFF+48 ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm4 # round0 key and v1.16b, v14.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v14.16b, #4 // vpsrlb $4, %xmm0, %xmm0 and v9.16b, v15.16b, v17.16b ushr v8.16b, v15.16b, #4 tbl v2.16b, {v20.16b},v1.16b // vpshufb %xmm1, %xmm2, %xmm2 tbl v10.16b, {v20.16b},v9.16b ld1 {v5.2d}, [x10] // vmovdqa Lk_mc_forward+48(%rip), %xmm5 // vmovdqa .Lk_dipt+16(%rip), %xmm1 # ipthi tbl v0.16b, {v21.16b},v0.16b // vpshufb %xmm0, %xmm1, %xmm0 tbl v8.16b, {v21.16b},v8.16b eor v2.16b, v2.16b, v16.16b // vpxor %xmm4, %xmm2, %xmm2 eor v10.16b, v10.16b, v16.16b eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 eor v8.16b, v8.16b, v10.16b b Ldec_2x_entry .align 4 Ldec_2x_loop: // // Inverse mix columns // // vmovdqa -0x20(%r10),%xmm4 # 4 : sb9u // vmovdqa -0x10(%r10),%xmm1 # 0 : sb9t tbl v4.16b, {v24.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sb9u tbl v12.16b, {v24.16b}, v10.16b tbl v1.16b, {v25.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb9t tbl v9.16b, {v25.16b}, v11.16b eor v0.16b, v4.16b, v16.16b // vpxor %xmm4, %xmm0, %xmm0 eor v8.16b, v12.16b, v16.16b // vmovdqa 0x00(%r10),%xmm4 # 4 : sbdu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch // vmovdqa 0x10(%r10),%xmm1 # 0 : sbdt tbl v4.16b, {v26.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbdu tbl v12.16b, {v26.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v27.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbdt tbl v9.16b, {v27.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b // vmovdqa 0x20(%r10), %xmm4 # 4 : sbbu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vmovdqa 0x30(%r10), %xmm1 # 0 : sbbt tbl v4.16b, {v28.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbbu tbl v12.16b, {v28.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v29.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbbt tbl v9.16b, {v29.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b 
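// (Annotation: the _2x routines here are the two-block variants - every
// operation on v0-v7 from the single-block cores is mirrored on v8-v15, so
// _vpaes_encrypt_2x and _vpaes_decrypt_2x process a pair of blocks per call
// while sharing the same round-key and table loads.)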
// vmovdqa 0x40(%r10), %xmm4 # 4 : sbeu eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b // vmovdqa 0x50(%r10), %xmm1 # 0 : sbet tbl v4.16b, {v30.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbeu tbl v12.16b, {v30.16b}, v10.16b tbl v0.16b, {v0.16b},v5.16b // vpshufb %xmm5, %xmm0, %xmm0 # MC ch tbl v8.16b, {v8.16b},v5.16b tbl v1.16b, {v31.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sbet tbl v9.16b, {v31.16b}, v11.16b eor v0.16b, v0.16b, v4.16b // vpxor %xmm4, %xmm0, %xmm0 # 4 = ch eor v8.16b, v8.16b, v12.16b ext v5.16b, v5.16b, v5.16b, #12 // vpalignr $12, %xmm5, %xmm5, %xmm5 eor v0.16b, v0.16b, v1.16b // vpxor %xmm1, %xmm0, %xmm0 # 0 = ch eor v8.16b, v8.16b, v9.16b sub w8, w8, #1 // sub $1,%rax # nr-- Ldec_2x_entry: // top of round and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i and v9.16b, v8.16b, v17.16b ushr v8.16b, v8.16b, #4 tbl v2.16b, {v19.16b},v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k tbl v10.16b, {v19.16b},v9.16b eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j eor v9.16b, v9.16b, v8.16b tbl v3.16b, {v18.16b},v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i tbl v11.16b, {v18.16b},v8.16b tbl v4.16b, {v18.16b},v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j tbl v12.16b, {v18.16b},v9.16b eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k eor v11.16b, v11.16b, v10.16b eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k eor v12.16b, v12.16b, v10.16b tbl v2.16b, {v18.16b},v3.16b // vpshufb %xmm3, %xmm10, %xmm2 # 2 = 1/iak tbl v10.16b, {v18.16b},v11.16b tbl v3.16b, {v18.16b},v4.16b // vpshufb %xmm4, %xmm10, %xmm3 # 3 = 1/jak tbl v11.16b, {v18.16b},v12.16b eor v2.16b, v2.16b, v1.16b // vpxor %xmm1, %xmm2, %xmm2 # 2 = io eor v10.16b, v10.16b, v9.16b eor v3.16b, v3.16b, v0.16b // vpxor %xmm0, %xmm3, %xmm3 # 3 = jo eor v11.16b, v11.16b, v8.16b ld1 {v16.2d}, [x9],#16 // vmovdqu (%r9), %xmm0 cbnz w8, Ldec_2x_loop // middle of last round // vmovdqa 0x60(%r10), %xmm4 # 3 : sbou tbl v4.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm4, %xmm4 # 4 = sbou tbl v12.16b, {v22.16b}, v10.16b // vmovdqa 0x70(%r10), %xmm1 # 0 : sbot tbl v1.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm1, %xmm1 # 0 = sb1t tbl v9.16b, {v23.16b}, v11.16b ld1 {v2.2d}, [x11] // vmovdqa -0x160(%r11), %xmm2 # Lk_sr-Lk_dsbd=-0x160 eor v4.16b, v4.16b, v16.16b // vpxor %xmm0, %xmm4, %xmm4 # 4 = sb1u + k eor v12.16b, v12.16b, v16.16b eor v0.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm0 # 0 = A eor v8.16b, v9.16b, v12.16b tbl v0.16b, {v0.16b},v2.16b // vpshufb %xmm2, %xmm0, %xmm0 tbl v1.16b, {v8.16b},v2.16b ret ######################################################## ## ## ## AES key schedule ## ## ## ######################################################## .align 4 _vpaes_key_preheat: adrp x10, Lk_inv@PAGE add x10, x10, Lk_inv@PAGEOFF movi v16.16b, #0x5b // Lk_s63 adrp x11, Lk_sb1@PAGE add x11, x11, Lk_sb1@PAGEOFF movi v17.16b, #0x0f // Lk_s0F ld1 {v18.2d,v19.2d,v20.2d,v21.2d}, [x10] // Lk_inv, Lk_ipt adrp x10, Lk_dksd@PAGE add x10, x10, Lk_dksd@PAGEOFF ld1 {v22.2d,v23.2d}, [x11] // Lk_sb1 adrp x11, Lk_mc_forward@PAGE add x11, x11, Lk_mc_forward@PAGEOFF ld1 {v24.2d,v25.2d,v26.2d,v27.2d}, [x10],#64 // Lk_dksd, Lk_dksb ld1 {v28.2d,v29.2d,v30.2d,v31.2d}, [x10],#64 // Lk_dkse, Lk_dks9 ld1 {v8.2d}, [x10] // Lk_rcon ld1 {v9.2d}, [x11] // Lk_mc_forward[0] ret .align 4 _vpaes_schedule_core: AARCH64_SIGN_LINK_REGISTER stp x29, x30, 
[sp,#-16]! add x29,sp,#0 bl _vpaes_key_preheat // load the tables ld1 {v0.16b}, [x0],#16 // vmovdqu (%rdi), %xmm0 # load key (unaligned) // input transform mov v3.16b, v0.16b // vmovdqa %xmm0, %xmm3 bl _vpaes_schedule_transform mov v7.16b, v0.16b // vmovdqa %xmm0, %xmm7 adrp x10, Lk_sr@PAGE // lea Lk_sr(%rip),%r10 add x10, x10, Lk_sr@PAGEOFF add x8, x8, x10 cbnz w3, Lschedule_am_decrypting // encrypting, output zeroth round key after transform st1 {v0.2d}, [x2] // vmovdqu %xmm0, (%rdx) b Lschedule_go Lschedule_am_decrypting: // decrypting, output zeroth round key after shiftrows ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 tbl v3.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 st1 {v3.2d}, [x2] // vmovdqu %xmm3, (%rdx) eor x8, x8, #0x30 // xor $0x30, %r8 Lschedule_go: cmp w1, #192 // cmp $192, %esi b.hi Lschedule_256 b.eq Lschedule_192 // 128: fall though ## ## .schedule_128 ## ## 128-bit specific part of key schedule. ## ## This schedule is really simple, because all its parts ## are accomplished by the subroutines. ## Lschedule_128: mov x0, #10 // mov $10, %esi Loop_schedule_128: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_round cbz x0, Lschedule_mangle_last bl _vpaes_schedule_mangle // write output b Loop_schedule_128 ## ## .aes_schedule_192 ## ## 192-bit specific part of key schedule. ## ## The main body of this schedule is the same as the 128-bit ## schedule, but with more smearing. The long, high side is ## stored in %xmm7 as before, and the short, low side is in ## the high bits of %xmm6. ## ## This schedule is somewhat nastier, however, because each ## round produces 192 bits of key material, or 1.5 round keys. ## Therefore, on each cycle we do 2 rounds and produce 3 round ## keys. ## .align 4 Lschedule_192: sub x0, x0, #8 ld1 {v0.16b}, [x0] // vmovdqu 8(%rdi),%xmm0 # load key part 2 (very unaligned) bl _vpaes_schedule_transform // input transform mov v6.16b, v0.16b // vmovdqa %xmm0, %xmm6 # save short part eor v4.16b, v4.16b, v4.16b // vpxor %xmm4, %xmm4, %xmm4 # clear 4 ins v6.d[0], v4.d[0] // vmovhlps %xmm4, %xmm6, %xmm6 # clobber low side with zeros mov x0, #4 // mov $4, %esi Loop_schedule_192: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_round ext v0.16b, v6.16b, v0.16b, #8 // vpalignr $8,%xmm6,%xmm0,%xmm0 bl _vpaes_schedule_mangle // save key n bl _vpaes_schedule_192_smear bl _vpaes_schedule_mangle // save key n+1 bl _vpaes_schedule_round cbz x0, Lschedule_mangle_last bl _vpaes_schedule_mangle // save key n+2 bl _vpaes_schedule_192_smear b Loop_schedule_192 ## ## .aes_schedule_256 ## ## 256-bit specific part of key schedule. ## ## The structure here is very similar to the 128-bit ## schedule, but with an additional "low side" in ## %xmm6. The low side's rounds are the same as the ## high side's, except no rcon and no rotation. ## .align 4 Lschedule_256: ld1 {v0.16b}, [x0] // vmovdqu 16(%rdi),%xmm0 # load key part 2 (unaligned) bl _vpaes_schedule_transform // input transform mov x0, #7 // mov $7, %esi Loop_schedule_256: sub x0, x0, #1 // dec %esi bl _vpaes_schedule_mangle // output low result mov v6.16b, v0.16b // vmovdqa %xmm0, %xmm6 # save cur_lo in xmm6 // high round bl _vpaes_schedule_round cbz x0, Lschedule_mangle_last bl _vpaes_schedule_mangle // low round. 
swap xmm7 and xmm6 dup v0.4s, v0.s[3] // vpshufd $0xFF, %xmm0, %xmm0 movi v4.16b, #0 mov v5.16b, v7.16b // vmovdqa %xmm7, %xmm5 mov v7.16b, v6.16b // vmovdqa %xmm6, %xmm7 bl _vpaes_schedule_low_round mov v7.16b, v5.16b // vmovdqa %xmm5, %xmm7 b Loop_schedule_256 ## ## .aes_schedule_mangle_last ## ## Mangler for last round of key schedule ## Mangles %xmm0 ## when encrypting, outputs out(%xmm0) ^ 63 ## when decrypting, outputs unskew(%xmm0) ## ## Always called right before return... jumps to cleanup and exits ## .align 4 Lschedule_mangle_last: // schedule last round key from xmm0 adrp x11, Lk_deskew@PAGE // lea Lk_deskew(%rip),%r11 # prepare to deskew add x11, x11, Lk_deskew@PAGEOFF cbnz w3, Lschedule_mangle_last_dec // encrypting ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10),%xmm1 adrp x11, Lk_opt@PAGE // lea Lk_opt(%rip), %r11 # prepare to output transform add x11, x11, Lk_opt@PAGEOFF add x2, x2, #32 // add $32, %rdx tbl v0.16b, {v0.16b}, v1.16b // vpshufb %xmm1, %xmm0, %xmm0 # output permute Lschedule_mangle_last_dec: ld1 {v20.2d,v21.2d}, [x11] // reload constants sub x2, x2, #16 // add $-16, %rdx eor v0.16b, v0.16b, v16.16b // vpxor Lk_s63(%rip), %xmm0, %xmm0 bl _vpaes_schedule_transform // output transform st1 {v0.2d}, [x2] // vmovdqu %xmm0, (%rdx) # save last key // cleanup eor v0.16b, v0.16b, v0.16b // vpxor %xmm0, %xmm0, %xmm0 eor v1.16b, v1.16b, v1.16b // vpxor %xmm1, %xmm1, %xmm1 eor v2.16b, v2.16b, v2.16b // vpxor %xmm2, %xmm2, %xmm2 eor v3.16b, v3.16b, v3.16b // vpxor %xmm3, %xmm3, %xmm3 eor v4.16b, v4.16b, v4.16b // vpxor %xmm4, %xmm4, %xmm4 eor v5.16b, v5.16b, v5.16b // vpxor %xmm5, %xmm5, %xmm5 eor v6.16b, v6.16b, v6.16b // vpxor %xmm6, %xmm6, %xmm6 eor v7.16b, v7.16b, v7.16b // vpxor %xmm7, %xmm7, %xmm7 ldp x29, x30, [sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret ## ## .aes_schedule_192_smear ## ## Smear the short, low side in the 192-bit key schedule. ## ## Inputs: ## %xmm7: high side, b a x y ## %xmm6: low side, d c 0 0 ## %xmm13: 0 ## ## Outputs: ## %xmm6: b+c+d b+c 0 0 ## %xmm0: b+c+d b+c b a ## .align 4 _vpaes_schedule_192_smear: movi v1.16b, #0 dup v0.4s, v7.s[3] ins v1.s[3], v6.s[2] // vpshufd $0x80, %xmm6, %xmm1 # d c 0 0 -> c 0 0 0 ins v0.s[0], v7.s[2] // vpshufd $0xFE, %xmm7, %xmm0 # b a _ _ -> b b b a eor v6.16b, v6.16b, v1.16b // vpxor %xmm1, %xmm6, %xmm6 # -> c+d c 0 0 eor v1.16b, v1.16b, v1.16b // vpxor %xmm1, %xmm1, %xmm1 eor v6.16b, v6.16b, v0.16b // vpxor %xmm0, %xmm6, %xmm6 # -> b+c+d b+c b a mov v0.16b, v6.16b // vmovdqa %xmm6, %xmm0 ins v6.d[0], v1.d[0] // vmovhlps %xmm1, %xmm6, %xmm6 # clobber low side with zeros ret ## ## .aes_schedule_round ## ## Runs one main round of the key schedule on %xmm0, %xmm7 ## ## Specifically, runs subbytes on the high dword of %xmm0 ## then rotates it by one byte and xors into the low dword of ## %xmm7. ## ## Adds rcon from low byte of %xmm8, then rotates %xmm8 for ## next rcon. ## ## Smears the dwords of %xmm7 by xoring the low into the ## second low, result into third, result into highest. ## ## Returns results in %xmm7 = %xmm0. ## Clobbers %xmm1-%xmm4, %r11. ## .align 4 _vpaes_schedule_round: // extract rcon from xmm8 movi v4.16b, #0 // vpxor %xmm4, %xmm4, %xmm4 ext v1.16b, v8.16b, v4.16b, #15 // vpalignr $15, %xmm8, %xmm4, %xmm1 ext v8.16b, v8.16b, v8.16b, #15 // vpalignr $15, %xmm8, %xmm8, %xmm8 eor v7.16b, v7.16b, v1.16b // vpxor %xmm1, %xmm7, %xmm7 // rotate dup v0.4s, v0.s[3] // vpshufd $0xFF, %xmm0, %xmm0 ext v0.16b, v0.16b, v0.16b, #1 // vpalignr $1, %xmm0, %xmm0, %xmm0 // fall through... 
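// (Annotation on _vpaes_schedule_round above: v8 holds the packed round
// constants (Lk_rcon, loaded by _vpaes_key_preheat); each main round extracts
// the next rcon byte into the key state in v7 and byte-rotates v8 with
// "ext ...,#15" for the following round, mirroring the %xmm8 comments carried
// over from the x86 version of this code.)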
// low round: same as high round, but no rotation and no rcon. _vpaes_schedule_low_round: // smear xmm7 ext v1.16b, v4.16b, v7.16b, #12 // vpslldq $4, %xmm7, %xmm1 eor v7.16b, v7.16b, v1.16b // vpxor %xmm1, %xmm7, %xmm7 ext v4.16b, v4.16b, v7.16b, #8 // vpslldq $8, %xmm7, %xmm4 // subbytes and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 # 0 = k ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 # 1 = i eor v7.16b, v7.16b, v4.16b // vpxor %xmm4, %xmm7, %xmm7 tbl v2.16b, {v19.16b}, v1.16b // vpshufb %xmm1, %xmm11, %xmm2 # 2 = a/k eor v1.16b, v1.16b, v0.16b // vpxor %xmm0, %xmm1, %xmm1 # 0 = j tbl v3.16b, {v18.16b}, v0.16b // vpshufb %xmm0, %xmm10, %xmm3 # 3 = 1/i eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 # 3 = iak = 1/i + a/k tbl v4.16b, {v18.16b}, v1.16b // vpshufb %xmm1, %xmm10, %xmm4 # 4 = 1/j eor v7.16b, v7.16b, v16.16b // vpxor Lk_s63(%rip), %xmm7, %xmm7 tbl v3.16b, {v18.16b}, v3.16b // vpshufb %xmm3, %xmm10, %xmm3 # 2 = 1/iak eor v4.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm4 # 4 = jak = 1/j + a/k tbl v2.16b, {v18.16b}, v4.16b // vpshufb %xmm4, %xmm10, %xmm2 # 3 = 1/jak eor v3.16b, v3.16b, v1.16b // vpxor %xmm1, %xmm3, %xmm3 # 2 = io eor v2.16b, v2.16b, v0.16b // vpxor %xmm0, %xmm2, %xmm2 # 3 = jo tbl v4.16b, {v23.16b}, v3.16b // vpshufb %xmm3, %xmm13, %xmm4 # 4 = sbou tbl v1.16b, {v22.16b}, v2.16b // vpshufb %xmm2, %xmm12, %xmm1 # 0 = sb1t eor v1.16b, v1.16b, v4.16b // vpxor %xmm4, %xmm1, %xmm1 # 0 = sbox output // add in smeared stuff eor v0.16b, v1.16b, v7.16b // vpxor %xmm7, %xmm1, %xmm0 eor v7.16b, v1.16b, v7.16b // vmovdqa %xmm0, %xmm7 ret ## ## .aes_schedule_transform ## ## Linear-transform %xmm0 according to tables at (%r11) ## ## Requires that %xmm9 = 0x0F0F... as in preheat ## Output in %xmm0 ## Clobbers %xmm1, %xmm2 ## .align 4 _vpaes_schedule_transform: and v1.16b, v0.16b, v17.16b // vpand %xmm9, %xmm0, %xmm1 ushr v0.16b, v0.16b, #4 // vpsrlb $4, %xmm0, %xmm0 // vmovdqa (%r11), %xmm2 # lo tbl v2.16b, {v20.16b}, v1.16b // vpshufb %xmm1, %xmm2, %xmm2 // vmovdqa 16(%r11), %xmm1 # hi tbl v0.16b, {v21.16b}, v0.16b // vpshufb %xmm0, %xmm1, %xmm0 eor v0.16b, v0.16b, v2.16b // vpxor %xmm2, %xmm0, %xmm0 ret ## ## .aes_schedule_mangle ## ## Mangle xmm0 from (basis-transformed) standard version ## to our version. 
## ## On encrypt, ## xor with 0x63 ## multiply by circulant 0,1,1,1 ## apply shiftrows transform ## ## On decrypt, ## xor with 0x63 ## multiply by "inverse mixcolumns" circulant E,B,D,9 ## deskew ## apply shiftrows transform ## ## ## Writes out to (%rdx), and increments or decrements it ## Keeps track of round number mod 4 in %r8 ## Preserves xmm0 ## Clobbers xmm1-xmm5 ## .align 4 _vpaes_schedule_mangle: mov v4.16b, v0.16b // vmovdqa %xmm0, %xmm4 # save xmm0 for later // vmovdqa .Lk_mc_forward(%rip),%xmm5 cbnz w3, Lschedule_mangle_dec // encrypting eor v4.16b, v0.16b, v16.16b // vpxor Lk_s63(%rip), %xmm0, %xmm4 add x2, x2, #16 // add $16, %rdx tbl v4.16b, {v4.16b}, v9.16b // vpshufb %xmm5, %xmm4, %xmm4 tbl v1.16b, {v4.16b}, v9.16b // vpshufb %xmm5, %xmm4, %xmm1 tbl v3.16b, {v1.16b}, v9.16b // vpshufb %xmm5, %xmm1, %xmm3 eor v4.16b, v4.16b, v1.16b // vpxor %xmm1, %xmm4, %xmm4 ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 eor v3.16b, v3.16b, v4.16b // vpxor %xmm4, %xmm3, %xmm3 b Lschedule_mangle_both .align 4 Lschedule_mangle_dec: // inverse mix columns // lea .Lk_dksd(%rip),%r11 ushr v1.16b, v4.16b, #4 // vpsrlb $4, %xmm4, %xmm1 # 1 = hi and v4.16b, v4.16b, v17.16b // vpand %xmm9, %xmm4, %xmm4 # 4 = lo // vmovdqa 0x00(%r11), %xmm2 tbl v2.16b, {v24.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 // vmovdqa 0x10(%r11), %xmm3 tbl v3.16b, {v25.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x20(%r11), %xmm2 tbl v2.16b, {v26.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 // vmovdqa 0x30(%r11), %xmm3 tbl v3.16b, {v27.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x40(%r11), %xmm2 tbl v2.16b, {v28.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 // vmovdqa 0x50(%r11), %xmm3 tbl v3.16b, {v29.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 eor v3.16b, v3.16b, v2.16b // vpxor %xmm2, %xmm3, %xmm3 // vmovdqa 0x60(%r11), %xmm2 tbl v2.16b, {v30.16b}, v4.16b // vpshufb %xmm4, %xmm2, %xmm2 tbl v3.16b, {v3.16b}, v9.16b // vpshufb %xmm5, %xmm3, %xmm3 // vmovdqa 0x70(%r11), %xmm4 tbl v4.16b, {v31.16b}, v1.16b // vpshufb %xmm1, %xmm4, %xmm4 ld1 {v1.2d}, [x8] // vmovdqa (%r8,%r10), %xmm1 eor v2.16b, v2.16b, v3.16b // vpxor %xmm3, %xmm2, %xmm2 eor v3.16b, v4.16b, v2.16b // vpxor %xmm2, %xmm4, %xmm3 sub x2, x2, #16 // add $-16, %rdx Lschedule_mangle_both: tbl v3.16b, {v3.16b}, v1.16b // vpshufb %xmm1, %xmm3, %xmm3 add x8, x8, #48 // add $-16, %r8 and x8, x8, #~(1<<6) // and $0x30, %r8 st1 {v3.2d}, [x2] // vmovdqu %xmm3, (%rdx) ret .globl _vpaes_set_encrypt_key .private_extern _vpaes_set_encrypt_key .align 4 _vpaes_set_encrypt_key: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#5] // kFlag_vpaes_set_encrypt_key #endif AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! 
// ABI spec says so lsr w9, w1, #5 // shr $5,%eax add w9, w9, #5 // $5,%eax str w9, [x2,#240] // mov %eax,240(%rdx) # AES_KEY->rounds = nbits/32+5; mov w3, #0 // mov $0,%ecx mov x8, #0x30 // mov $0x30,%r8d bl _vpaes_schedule_core eor x0, x0, x0 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .globl _vpaes_set_decrypt_key .private_extern _vpaes_set_decrypt_key .align 4 _vpaes_set_decrypt_key: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so lsr w9, w1, #5 // shr $5,%eax add w9, w9, #5 // $5,%eax str w9, [x2,#240] // mov %eax,240(%rdx) # AES_KEY->rounds = nbits/32+5; lsl w9, w9, #4 // shl $4,%eax add x2, x2, #16 // lea 16(%rdx,%rax),%rdx add x2, x2, x9 mov w3, #1 // mov $1,%ecx lsr w8, w1, #1 // shr $1,%r8d and x8, x8, #32 // and $32,%r8d eor x8, x8, #32 // xor $32,%r8d # nbits==192?0:32 bl _vpaes_schedule_core ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .globl _vpaes_cbc_encrypt .private_extern _vpaes_cbc_encrypt .align 4 _vpaes_cbc_encrypt: AARCH64_SIGN_LINK_REGISTER cbz x2, Lcbc_abort cmp w5, #0 // check direction b.eq vpaes_cbc_decrypt stp x29,x30,[sp,#-16]! add x29,sp,#0 mov x17, x2 // reassign mov x2, x3 // reassign ld1 {v0.16b}, [x4] // load ivec bl _vpaes_encrypt_preheat b Lcbc_enc_loop .align 4 Lcbc_enc_loop: ld1 {v7.16b}, [x0],#16 // load input eor v7.16b, v7.16b, v0.16b // xor with ivec bl _vpaes_encrypt_core st1 {v0.16b}, [x1],#16 // save output subs x17, x17, #16 b.hi Lcbc_enc_loop st1 {v0.16b}, [x4] // write ivec ldp x29,x30,[sp],#16 Lcbc_abort: AARCH64_VALIDATE_LINK_REGISTER ret .align 4 vpaes_cbc_decrypt: // Not adding AARCH64_SIGN_LINK_REGISTER here because vpaes_cbc_decrypt is jumped to // only from vpaes_cbc_encrypt which has already signed the return address. stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so stp d10,d11,[sp,#-16]! stp d12,d13,[sp,#-16]! stp d14,d15,[sp,#-16]! mov x17, x2 // reassign mov x2, x3 // reassign ld1 {v6.16b}, [x4] // load ivec bl _vpaes_decrypt_preheat tst x17, #16 b.eq Lcbc_dec_loop2x ld1 {v7.16b}, [x0], #16 // load input bl _vpaes_decrypt_core eor v0.16b, v0.16b, v6.16b // xor with ivec orr v6.16b, v7.16b, v7.16b // next ivec value st1 {v0.16b}, [x1], #16 subs x17, x17, #16 b.ls Lcbc_dec_done .align 4 Lcbc_dec_loop2x: ld1 {v14.16b,v15.16b}, [x0], #32 bl _vpaes_decrypt_2x eor v0.16b, v0.16b, v6.16b // xor with ivec eor v1.16b, v1.16b, v14.16b orr v6.16b, v15.16b, v15.16b st1 {v0.16b,v1.16b}, [x1], #32 subs x17, x17, #32 b.hi Lcbc_dec_loop2x Lcbc_dec_done: st1 {v6.16b}, [x4] ldp d14,d15,[sp],#16 ldp d12,d13,[sp],#16 ldp d10,d11,[sp],#16 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .globl _vpaes_ctr32_encrypt_blocks .private_extern _vpaes_ctr32_encrypt_blocks .align 4 _vpaes_ctr32_encrypt_blocks: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 stp d8,d9,[sp,#-16]! // ABI spec says so stp d10,d11,[sp,#-16]! stp d12,d13,[sp,#-16]! stp d14,d15,[sp,#-16]! cbz x2, Lctr32_done // Note, unlike the other functions, x2 here is measured in blocks, // not bytes. mov x17, x2 mov x2, x3 // Load the IV and counter portion. ldr w6, [x4, #12] ld1 {v7.16b}, [x4] bl _vpaes_encrypt_preheat tst x17, #1 rev w6, w6 // The counter is big-endian. b.eq Lctr32_prep_loop // Handle one block so the remaining block count is even for // _vpaes_encrypt_2x. 
ld1 {v6.16b}, [x0], #16 // Load input ahead of time bl _vpaes_encrypt_core eor v0.16b, v0.16b, v6.16b // XOR input and result st1 {v0.16b}, [x1], #16 subs x17, x17, #1 // Update the counter. add w6, w6, #1 rev w7, w6 mov v7.s[3], w7 b.ls Lctr32_done Lctr32_prep_loop: // _vpaes_encrypt_core takes its input from v7, while _vpaes_encrypt_2x // uses v14 and v15. mov v15.16b, v7.16b mov v14.16b, v7.16b add w6, w6, #1 rev w7, w6 mov v15.s[3], w7 Lctr32_loop: ld1 {v6.16b,v7.16b}, [x0], #32 // Load input ahead of time bl _vpaes_encrypt_2x eor v0.16b, v0.16b, v6.16b // XOR input and result eor v1.16b, v1.16b, v7.16b // XOR input and result (#2) st1 {v0.16b,v1.16b}, [x1], #32 subs x17, x17, #2 // Update the counter. add w7, w6, #1 add w6, w6, #2 rev w7, w7 mov v14.s[3], w7 rev w7, w6 mov v15.s[3], w7 b.hi Lctr32_loop Lctr32_done: ldp d14,d15,[sp],#16 ldp d12,d13,[sp],#16 ldp d10,d11,[sp],#16 ldp d8,d9,[sp],#16 ldp x29,x30,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
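An illustrative aside (not part of the generated file above): _vpaes_ctr32_encrypt_blocks treats x2 as a block count and keeps a 32-bit big-endian counter in the last word of the IV, wrapping modulo 2^32. A minimal Python sketch of that counter handling follows; aes_encrypt_block is a placeholder name, not an API from this crate.

    import struct

    def ctr32_keystream_blocks(aes_encrypt_block, iv16: bytes, nblocks: int):
        # First 12 bytes of the IV stay fixed; the last 4 are the big-endian counter.
        prefix, ctr = iv16[:12], struct.unpack(">I", iv16[12:])[0]
        for _ in range(nblocks):
            yield aes_encrypt_block(prefix + struct.pack(">I", ctr))
            ctr = (ctr + 1) & 0xFFFFFFFF  # same wrap as the "add w6, w6, #1" / rev updates above

Each keystream block is then XORed with one 16-byte block of input, which is what the eor/st1 pairs in the loop above perform.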
marvin-hansen/iggy-streaming-system
10,904
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/ghash-neon-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .text .globl _gcm_init_neon .private_extern _gcm_init_neon .align 4 _gcm_init_neon: AARCH64_VALID_CALL_TARGET // This function is adapted from gcm_init_v8. xC2 is t3. ld1 {v17.2d}, [x1] // load H movi v19.16b, #0xe1 shl v19.2d, v19.2d, #57 // 0xc2.0 ext v3.16b, v17.16b, v17.16b, #8 ushr v18.2d, v19.2d, #63 dup v17.4s, v17.s[1] ext v16.16b, v18.16b, v19.16b, #8 // t0=0xc2....01 ushr v18.2d, v3.2d, #63 sshr v17.4s, v17.4s, #31 // broadcast carry bit and v18.16b, v18.16b, v16.16b shl v3.2d, v3.2d, #1 ext v18.16b, v18.16b, v18.16b, #8 and v16.16b, v16.16b, v17.16b orr v3.16b, v3.16b, v18.16b // H<<<=1 eor v5.16b, v3.16b, v16.16b // twisted H st1 {v5.2d}, [x0] // store Htable[0] ret .globl _gcm_gmult_neon .private_extern _gcm_gmult_neon .align 4 _gcm_gmult_neon: AARCH64_VALID_CALL_TARGET ld1 {v3.16b}, [x0] // load Xi ld1 {v5.1d}, [x1], #8 // load twisted H ld1 {v6.1d}, [x1] adrp x9, Lmasks@PAGE // load constants add x9, x9, Lmasks@PAGEOFF ld1 {v24.2d, v25.2d}, [x9] rev64 v3.16b, v3.16b // byteswap Xi ext v3.16b, v3.16b, v3.16b, #8 eor v7.8b, v5.8b, v6.8b // Karatsuba pre-processing mov x3, #16 b Lgmult_neon .globl _gcm_ghash_neon .private_extern _gcm_ghash_neon .align 4 _gcm_ghash_neon: AARCH64_VALID_CALL_TARGET ld1 {v0.16b}, [x0] // load Xi ld1 {v5.1d}, [x1], #8 // load twisted H ld1 {v6.1d}, [x1] adrp x9, Lmasks@PAGE // load constants add x9, x9, Lmasks@PAGEOFF ld1 {v24.2d, v25.2d}, [x9] rev64 v0.16b, v0.16b // byteswap Xi ext v0.16b, v0.16b, v0.16b, #8 eor v7.8b, v5.8b, v6.8b // Karatsuba pre-processing Loop_neon: ld1 {v3.16b}, [x2], #16 // load inp rev64 v3.16b, v3.16b // byteswap inp ext v3.16b, v3.16b, v3.16b, #8 eor v3.16b, v3.16b, v0.16b // inp ^= Xi Lgmult_neon: // Split the input into v3 and v4. (The upper halves are unused, // so it is okay to leave them alone.) ins v4.d[0], v3.d[1] ext v16.8b, v5.8b, v5.8b, #1 // A1 pmull v16.8h, v16.8b, v3.8b // F = A1*B ext v0.8b, v3.8b, v3.8b, #1 // B1 pmull v0.8h, v5.8b, v0.8b // E = A*B1 ext v17.8b, v5.8b, v5.8b, #2 // A2 pmull v17.8h, v17.8b, v3.8b // H = A2*B ext v19.8b, v3.8b, v3.8b, #2 // B2 pmull v19.8h, v5.8b, v19.8b // G = A*B2 ext v18.8b, v5.8b, v5.8b, #3 // A3 eor v16.16b, v16.16b, v0.16b // L = E + F pmull v18.8h, v18.8b, v3.8b // J = A3*B ext v0.8b, v3.8b, v3.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v0.8h, v5.8b, v0.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. ext v19.8b, v3.8b, v3.8b, #4 // B4 eor v18.16b, v18.16b, v0.16b // N = I + J pmull v19.8h, v5.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. 
For now, we just // pair up independent instructions. zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v0.8h, v5.8b, v3.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v0.16b, v0.16b, v16.16b eor v0.16b, v0.16b, v18.16b eor v3.8b, v3.8b, v4.8b // Karatsuba pre-processing ext v16.8b, v7.8b, v7.8b, #1 // A1 pmull v16.8h, v16.8b, v3.8b // F = A1*B ext v1.8b, v3.8b, v3.8b, #1 // B1 pmull v1.8h, v7.8b, v1.8b // E = A*B1 ext v17.8b, v7.8b, v7.8b, #2 // A2 pmull v17.8h, v17.8b, v3.8b // H = A2*B ext v19.8b, v3.8b, v3.8b, #2 // B2 pmull v19.8h, v7.8b, v19.8b // G = A*B2 ext v18.8b, v7.8b, v7.8b, #3 // A3 eor v16.16b, v16.16b, v1.16b // L = E + F pmull v18.8h, v18.8b, v3.8b // J = A3*B ext v1.8b, v3.8b, v3.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v1.8h, v7.8b, v1.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. ext v19.8b, v3.8b, v3.8b, #4 // B4 eor v18.16b, v18.16b, v1.16b // N = I + J pmull v19.8h, v7.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. For now, we just // pair up independent instructions. 
zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v1.8h, v7.8b, v3.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v1.16b, v1.16b, v16.16b eor v1.16b, v1.16b, v18.16b ext v16.8b, v6.8b, v6.8b, #1 // A1 pmull v16.8h, v16.8b, v4.8b // F = A1*B ext v2.8b, v4.8b, v4.8b, #1 // B1 pmull v2.8h, v6.8b, v2.8b // E = A*B1 ext v17.8b, v6.8b, v6.8b, #2 // A2 pmull v17.8h, v17.8b, v4.8b // H = A2*B ext v19.8b, v4.8b, v4.8b, #2 // B2 pmull v19.8h, v6.8b, v19.8b // G = A*B2 ext v18.8b, v6.8b, v6.8b, #3 // A3 eor v16.16b, v16.16b, v2.16b // L = E + F pmull v18.8h, v18.8b, v4.8b // J = A3*B ext v2.8b, v4.8b, v4.8b, #3 // B3 eor v17.16b, v17.16b, v19.16b // M = G + H pmull v2.8h, v6.8b, v2.8b // I = A*B3 // Here we diverge from the 32-bit version. It computes the following // (instructions reordered for clarity): // // veor $t0#lo, $t0#lo, $t0#hi @ t0 = P0 + P1 (L) // vand $t0#hi, $t0#hi, $k48 // veor $t0#lo, $t0#lo, $t0#hi // // veor $t1#lo, $t1#lo, $t1#hi @ t1 = P2 + P3 (M) // vand $t1#hi, $t1#hi, $k32 // veor $t1#lo, $t1#lo, $t1#hi // // veor $t2#lo, $t2#lo, $t2#hi @ t2 = P4 + P5 (N) // vand $t2#hi, $t2#hi, $k16 // veor $t2#lo, $t2#lo, $t2#hi // // veor $t3#lo, $t3#lo, $t3#hi @ t3 = P6 + P7 (K) // vmov.i64 $t3#hi, #0 // // $kN is a mask with the bottom N bits set. AArch64 cannot compute on // upper halves of SIMD registers, so we must split each half into // separate registers. To compensate, we pair computations up and // parallelize. ext v19.8b, v4.8b, v4.8b, #4 // B4 eor v18.16b, v18.16b, v2.16b // N = I + J pmull v19.8h, v6.8b, v19.8b // K = A*B4 // This can probably be scheduled more efficiently. For now, we just // pair up independent instructions. zip1 v20.2d, v16.2d, v17.2d zip1 v22.2d, v18.2d, v19.2d zip2 v21.2d, v16.2d, v17.2d zip2 v23.2d, v18.2d, v19.2d eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b and v21.16b, v21.16b, v24.16b and v23.16b, v23.16b, v25.16b eor v20.16b, v20.16b, v21.16b eor v22.16b, v22.16b, v23.16b zip1 v16.2d, v20.2d, v21.2d zip1 v18.2d, v22.2d, v23.2d zip2 v17.2d, v20.2d, v21.2d zip2 v19.2d, v22.2d, v23.2d ext v16.16b, v16.16b, v16.16b, #15 // t0 = t0 << 8 ext v17.16b, v17.16b, v17.16b, #14 // t1 = t1 << 16 pmull v2.8h, v6.8b, v4.8b // D = A*B ext v19.16b, v19.16b, v19.16b, #12 // t3 = t3 << 32 ext v18.16b, v18.16b, v18.16b, #13 // t2 = t2 << 24 eor v16.16b, v16.16b, v17.16b eor v18.16b, v18.16b, v19.16b eor v2.16b, v2.16b, v16.16b eor v2.16b, v2.16b, v18.16b ext v16.16b, v0.16b, v2.16b, #8 eor v1.16b, v1.16b, v0.16b // Karatsuba post-processing eor v1.16b, v1.16b, v2.16b eor v1.16b, v1.16b, v16.16b // Xm overlaps Xh.lo and Xl.hi ins v0.d[1], v1.d[0] // Xh|Xl - 256-bit result // This is a no-op due to the ins instruction below. 
// ins v2.d[0], v1.d[1] // equivalent of reduction_avx from ghash-x86_64.pl shl v17.2d, v0.2d, #57 // 1st phase shl v18.2d, v0.2d, #62 eor v18.16b, v18.16b, v17.16b // shl v17.2d, v0.2d, #63 eor v18.16b, v18.16b, v17.16b // // Note Xm contains {Xl.d[1], Xh.d[0]}. eor v18.16b, v18.16b, v1.16b ins v0.d[1], v18.d[0] // Xl.d[1] ^= t2.d[0] ins v2.d[0], v18.d[1] // Xh.d[0] ^= t2.d[1] ushr v18.2d, v0.2d, #1 // 2nd phase eor v2.16b, v2.16b,v0.16b eor v0.16b, v0.16b,v18.16b // ushr v18.2d, v18.2d, #6 ushr v0.2d, v0.2d, #1 // eor v0.16b, v0.16b, v2.16b // eor v0.16b, v0.16b, v18.16b // subs x3, x3, #16 bne Loop_neon rev64 v0.16b, v0.16b // byteswap Xi and write ext v0.16b, v0.16b, v0.16b, #8 st1 {v0.16b}, [x0] ret .section __TEXT,__const .align 4 Lmasks: .quad 0x0000ffffffffffff // k48 .quad 0x00000000ffffffff // k32 .quad 0x000000000000ffff // k16 .quad 0x0000000000000000 // k0 .byte 71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,100,101,114,105,118,101,100,32,102,114,111,109,32,65,82,77,118,52,32,118,101,114,115,105,111,110,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
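An illustrative aside (not part of the generated file above): the pmull/Karatsuba code in gcm_gmult_neon and gcm_ghash_neon computes a carry-less multiply in GF(2^128) followed by reduction; the 0xe1-derived masks correspond to the GCM polynomial from NIST SP 800-38D. A bit-at-a-time reference sketch of that field multiply in Python, far slower than the assembly and shown only to pin down the math:

    def gf128_mul(x: int, y: int) -> int:
        # x, y are 128-bit integers in the MSB-first bit numbering of SP 800-38D.
        R = 0xE1000000000000000000000000000000  # reduction constant
        z, v = 0, x
        for i in range(127, -1, -1):
            if (y >> i) & 1:
                z ^= v
            v = (v >> 1) ^ R if (v & 1) else (v >> 1)
        return z

GHASH itself XORs each 16-byte block into the accumulator and multiplies by the hash key H, mirroring the "inp ^= Xi" step at the top of Loop_neon.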
marvin-hansen/iggy-streaming-system
28,225
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/sha1-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .text .globl _sha1_block_data_order_nohw .private_extern _sha1_block_data_order_nohw .align 6 _sha1_block_data_order_nohw: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-96]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] ldp w20,w21,[x0] ldp w22,w23,[x0,#8] ldr w24,[x0,#16] Loop: ldr x3,[x1],#64 movz w28,#0x7999 sub x2,x2,#1 movk w28,#0x5a82,lsl#16 #ifdef __AARCH64EB__ ror x3,x3,#32 #else rev32 x3,x3 #endif add w24,w24,w28 // warm it up add w24,w24,w3 lsr x4,x3,#32 ldr x5,[x1,#-56] bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 add w23,w23,w4 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x5,x5,#32 #else rev32 x5,x5 #endif bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w5 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) lsr x6,x5,#32 ldr x7,[x1,#-48] bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w6 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x7,x7,#32 #else rev32 x7,x7 #endif bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w7 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) lsr x8,x7,#32 ldr x9,[x1,#-40] bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 add w24,w24,w8 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x9,x9,#32 #else rev32 x9,x9 #endif bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 add w23,w23,w9 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) lsr x10,x9,#32 ldr x11,[x1,#-32] bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w10 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x11,x11,#32 #else rev32 x11,x11 #endif bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w11 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) lsr x12,x11,#32 ldr x13,[x1,#-24] bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w12 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x13,x13,#32 #else rev32 x13,x13 #endif bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 add w24,w24,w13 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) lsr x14,x13,#32 ldr x15,[x1,#-16] bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) ror 
w21,w21,#2 add w23,w23,w14 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x15,x15,#32 #else rev32 x15,x15 #endif bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 add w22,w22,w15 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) lsr x16,x15,#32 ldr x17,[x1,#-8] bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 add w21,w21,w16 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) #ifdef __AARCH64EB__ ror x17,x17,#32 #else rev32 x17,x17 #endif bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 add w20,w20,w17 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) lsr x19,x17,#32 eor w3,w3,w5 bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 eor w3,w3,w11 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) eor w3,w3,w16 ror w22,w22,#2 add w24,w24,w19 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 bic w25,w23,w21 and w26,w22,w21 ror w27,w20,#27 eor w4,w4,w12 add w23,w23,w28 // future e+=K orr w25,w25,w26 add w24,w24,w27 // e+=rot(a,5) eor w4,w4,w17 ror w21,w21,#2 add w23,w23,w3 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 bic w25,w22,w20 and w26,w21,w20 ror w27,w24,#27 eor w5,w5,w13 add w22,w22,w28 // future e+=K orr w25,w25,w26 add w23,w23,w27 // e+=rot(a,5) eor w5,w5,w19 ror w20,w20,#2 add w22,w22,w4 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 bic w25,w21,w24 and w26,w20,w24 ror w27,w23,#27 eor w6,w6,w14 add w21,w21,w28 // future e+=K orr w25,w25,w26 add w22,w22,w27 // e+=rot(a,5) eor w6,w6,w3 ror w24,w24,#2 add w21,w21,w5 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 bic w25,w20,w23 and w26,w24,w23 ror w27,w22,#27 eor w7,w7,w15 add w20,w20,w28 // future e+=K orr w25,w25,w26 add w21,w21,w27 // e+=rot(a,5) eor w7,w7,w4 ror w23,w23,#2 add w20,w20,w6 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w7,w7,#31 movz w28,#0xeba1 movk w28,#0x6ed9,lsl#16 eor w8,w8,w10 bic w25,w24,w22 and w26,w23,w22 ror w27,w21,#27 eor w8,w8,w16 add w24,w24,w28 // future e+=K orr w25,w25,w26 add w20,w20,w27 // e+=rot(a,5) eor w8,w8,w5 ror w22,w22,#2 add w24,w24,w7 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w9,w9,w6 add w23,w23,w8 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w10,w10,w7 add w22,w22,w9 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w11,w11,w8 add w21,w21,w10 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w11,w11,#31 eor w12,w12,w14 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w12,w12,w9 add w20,w20,w11 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w12,w12,#31 eor w13,w13,w15 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor 
w13,w13,w5 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w13,w13,w10 add w24,w24,w12 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w13,w13,#31 eor w14,w14,w16 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w14,w14,w6 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w14,w14,w11 add w23,w23,w13 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w14,w14,#31 eor w15,w15,w17 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w15,w15,w7 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w15,w15,w12 add w22,w22,w14 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w15,w15,#31 eor w16,w16,w19 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w16,w16,w8 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w16,w16,w13 add w21,w21,w15 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w17,w17,w14 add w20,w20,w16 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w19,w19,w15 add w24,w24,w17 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w19,w19,#31 eor w3,w3,w5 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w3,w3,w11 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w3,w3,w16 add w23,w23,w19 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w4,w4,w12 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w4,w4,w17 add w22,w22,w3 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w5,w5,w13 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w5,w5,w19 add w21,w21,w4 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w6,w6,w14 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w6,w6,w3 add w20,w20,w5 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w7,w7,w15 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w7,w7,w4 add w24,w24,w6 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w7,w7,#31 eor w8,w8,w10 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w8,w8,w16 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w8,w8,w5 add w23,w23,w7 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w9,w9,w6 add w22,w22,w8 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w10,w10,w7 add w21,w21,w9 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor 
w11,w11,w8 add w20,w20,w10 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w11,w11,#31 movz w28,#0xbcdc movk w28,#0x8f1b,lsl#16 eor w12,w12,w14 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w12,w12,w9 add w24,w24,w11 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w12,w12,#31 orr w25,w21,w22 and w26,w21,w22 eor w13,w13,w15 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w13,w13,w5 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w13,w13,w10 add w23,w23,w12 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w13,w13,#31 orr w25,w20,w21 and w26,w20,w21 eor w14,w14,w16 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w14,w14,w6 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w14,w14,w11 add w22,w22,w13 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w14,w14,#31 orr w25,w24,w20 and w26,w24,w20 eor w15,w15,w17 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w15,w15,w7 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w15,w15,w12 add w21,w21,w14 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w15,w15,#31 orr w25,w23,w24 and w26,w23,w24 eor w16,w16,w19 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w16,w16,w8 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w16,w16,w13 add w20,w20,w15 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w16,w16,#31 orr w25,w22,w23 and w26,w22,w23 eor w17,w17,w3 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w17,w17,w9 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w17,w17,w14 add w24,w24,w16 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w17,w17,#31 orr w25,w21,w22 and w26,w21,w22 eor w19,w19,w4 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w19,w19,w10 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w19,w19,w15 add w23,w23,w17 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w19,w19,#31 orr w25,w20,w21 and w26,w20,w21 eor w3,w3,w5 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w3,w3,w11 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w3,w3,w16 add w22,w22,w19 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w3,w3,#31 orr w25,w24,w20 and w26,w24,w20 eor w4,w4,w6 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w4,w4,w12 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w4,w4,w17 add w21,w21,w3 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w4,w4,#31 orr w25,w23,w24 and w26,w23,w24 eor w5,w5,w7 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w5,w5,w13 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w5,w5,w19 add w20,w20,w4 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w5,w5,#31 orr w25,w22,w23 and w26,w22,w23 eor w6,w6,w8 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w6,w6,w14 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w6,w6,w3 add w24,w24,w5 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w6,w6,#31 orr w25,w21,w22 and w26,w21,w22 eor w7,w7,w9 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w7,w7,w15 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w7,w7,w4 add w23,w23,w6 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w7,w7,#31 orr w25,w20,w21 and w26,w20,w21 eor w8,w8,w10 ror w27,w24,#27 and w25,w25,w22 add 
w22,w22,w28 // future e+=K eor w8,w8,w16 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w8,w8,w5 add w22,w22,w7 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w8,w8,#31 orr w25,w24,w20 and w26,w24,w20 eor w9,w9,w11 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w9,w9,w17 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w9,w9,w6 add w21,w21,w8 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w9,w9,#31 orr w25,w23,w24 and w26,w23,w24 eor w10,w10,w12 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w10,w10,w19 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w10,w10,w7 add w20,w20,w9 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w10,w10,#31 orr w25,w22,w23 and w26,w22,w23 eor w11,w11,w13 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w11,w11,w3 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w11,w11,w8 add w24,w24,w10 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w11,w11,#31 orr w25,w21,w22 and w26,w21,w22 eor w12,w12,w14 ror w27,w20,#27 and w25,w25,w23 add w23,w23,w28 // future e+=K eor w12,w12,w4 add w24,w24,w27 // e+=rot(a,5) orr w25,w25,w26 ror w21,w21,#2 eor w12,w12,w9 add w23,w23,w11 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w12,w12,#31 orr w25,w20,w21 and w26,w20,w21 eor w13,w13,w15 ror w27,w24,#27 and w25,w25,w22 add w22,w22,w28 // future e+=K eor w13,w13,w5 add w23,w23,w27 // e+=rot(a,5) orr w25,w25,w26 ror w20,w20,#2 eor w13,w13,w10 add w22,w22,w12 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w13,w13,#31 orr w25,w24,w20 and w26,w24,w20 eor w14,w14,w16 ror w27,w23,#27 and w25,w25,w21 add w21,w21,w28 // future e+=K eor w14,w14,w6 add w22,w22,w27 // e+=rot(a,5) orr w25,w25,w26 ror w24,w24,#2 eor w14,w14,w11 add w21,w21,w13 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w14,w14,#31 orr w25,w23,w24 and w26,w23,w24 eor w15,w15,w17 ror w27,w22,#27 and w25,w25,w20 add w20,w20,w28 // future e+=K eor w15,w15,w7 add w21,w21,w27 // e+=rot(a,5) orr w25,w25,w26 ror w23,w23,#2 eor w15,w15,w12 add w20,w20,w14 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w15,w15,#31 movz w28,#0xc1d6 movk w28,#0xca62,lsl#16 orr w25,w22,w23 and w26,w22,w23 eor w16,w16,w19 ror w27,w21,#27 and w25,w25,w24 add w24,w24,w28 // future e+=K eor w16,w16,w8 add w20,w20,w27 // e+=rot(a,5) orr w25,w25,w26 ror w22,w22,#2 eor w16,w16,w13 add w24,w24,w15 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w17,w17,w14 add w23,w23,w16 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w19,w19,w15 add w22,w22,w17 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w19,w19,#31 eor w3,w3,w5 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w3,w3,w11 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w3,w3,w16 add w21,w21,w19 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w3,w3,#31 eor w4,w4,w6 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w4,w4,w12 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w4,w4,w17 add w20,w20,w3 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w4,w4,#31 eor w5,w5,w7 eor w25,w24,w22 ror w27,w21,#27 add 
w24,w24,w28 // future e+=K eor w5,w5,w13 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w5,w5,w19 add w24,w24,w4 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w5,w5,#31 eor w6,w6,w8 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w6,w6,w14 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w6,w6,w3 add w23,w23,w5 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w6,w6,#31 eor w7,w7,w9 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w7,w7,w15 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w7,w7,w4 add w22,w22,w6 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w7,w7,#31 eor w8,w8,w10 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w8,w8,w16 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w8,w8,w5 add w21,w21,w7 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w8,w8,#31 eor w9,w9,w11 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w9,w9,w17 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w9,w9,w6 add w20,w20,w8 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w9,w9,#31 eor w10,w10,w12 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w10,w10,w19 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w10,w10,w7 add w24,w24,w9 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w10,w10,#31 eor w11,w11,w13 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w11,w11,w3 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w11,w11,w8 add w23,w23,w10 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w11,w11,#31 eor w12,w12,w14 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w12,w12,w4 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w12,w12,w9 add w22,w22,w11 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w12,w12,#31 eor w13,w13,w15 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w13,w13,w5 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w13,w13,w10 add w21,w21,w12 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w13,w13,#31 eor w14,w14,w16 eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w14,w14,w6 eor w25,w25,w24 add w21,w21,w27 // e+=rot(a,5) ror w23,w23,#2 eor w14,w14,w11 add w20,w20,w13 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ror w14,w14,#31 eor w15,w15,w17 eor w25,w24,w22 ror w27,w21,#27 add w24,w24,w28 // future e+=K eor w15,w15,w7 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 eor w15,w15,w12 add w24,w24,w14 // future e+=X[i] add w20,w20,w25 // e+=F(b,c,d) ror w15,w15,#31 eor w16,w16,w19 eor w25,w23,w21 ror w27,w20,#27 add w23,w23,w28 // future e+=K eor w16,w16,w8 eor w25,w25,w22 add w24,w24,w27 // e+=rot(a,5) ror w21,w21,#2 eor w16,w16,w13 add w23,w23,w15 // future e+=X[i] add w24,w24,w25 // e+=F(b,c,d) ror w16,w16,#31 eor w17,w17,w3 eor w25,w22,w20 ror w27,w24,#27 add w22,w22,w28 // future e+=K eor w17,w17,w9 eor w25,w25,w21 add w23,w23,w27 // e+=rot(a,5) ror w20,w20,#2 eor w17,w17,w14 add w22,w22,w16 // future e+=X[i] add w23,w23,w25 // e+=F(b,c,d) ror w17,w17,#31 eor w19,w19,w4 eor w25,w21,w24 ror w27,w23,#27 add w21,w21,w28 // future e+=K eor w19,w19,w10 eor w25,w25,w20 add w22,w22,w27 // e+=rot(a,5) ror w24,w24,#2 eor w19,w19,w15 add w21,w21,w17 // future e+=X[i] add w22,w22,w25 // e+=F(b,c,d) ror w19,w19,#31 ldp w4,w5,[x0] eor w25,w20,w23 ror w27,w22,#27 add w20,w20,w28 // future e+=K eor w25,w25,w24 add w21,w21,w27 // 
e+=rot(a,5) ror w23,w23,#2 add w20,w20,w19 // future e+=X[i] add w21,w21,w25 // e+=F(b,c,d) ldp w6,w7,[x0,#8] eor w25,w24,w22 ror w27,w21,#27 eor w25,w25,w23 add w20,w20,w27 // e+=rot(a,5) ror w22,w22,#2 ldr w8,[x0,#16] add w20,w20,w25 // e+=F(b,c,d) add w21,w21,w5 add w22,w22,w6 add w20,w20,w4 add w23,w23,w7 add w24,w24,w8 stp w20,w21,[x0] stp w22,w23,[x0,#8] str w24,[x0,#16] cbnz x2,Loop ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] ldp x23,x24,[sp,#48] ldp x25,x26,[sp,#64] ldp x27,x28,[sp,#80] ldr x29,[sp],#96 ret .globl _sha1_block_data_order_hw .private_extern _sha1_block_data_order_hw .align 6 _sha1_block_data_order_hw: // Armv8.3-A PAuth: even though x30 is pushed to stack it is not popped later. AARCH64_VALID_CALL_TARGET stp x29,x30,[sp,#-16]! add x29,sp,#0 adrp x4,Lconst@PAGE add x4,x4,Lconst@PAGEOFF eor v1.16b,v1.16b,v1.16b ld1 {v0.4s},[x0],#16 ld1 {v1.s}[0],[x0] sub x0,x0,#16 ld1 {v16.4s,v17.4s,v18.4s,v19.4s},[x4] Loop_hw: ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 sub x2,x2,#1 rev32 v4.16b,v4.16b rev32 v5.16b,v5.16b add v20.4s,v16.4s,v4.4s rev32 v6.16b,v6.16b orr v22.16b,v0.16b,v0.16b // offload add v21.4s,v16.4s,v5.4s rev32 v7.16b,v7.16b .long 0x5e280803 //sha1h v3.16b,v0.16b .long 0x5e140020 //sha1c v0.16b,v1.16b,v20.4s // 0 add v20.4s,v16.4s,v6.4s .long 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 1 .long 0x5e150060 //sha1c v0.16b,v3.16b,v21.4s add v21.4s,v16.4s,v7.4s .long 0x5e2818e4 //sha1su1 v4.16b,v7.16b .long 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 2 .long 0x5e140040 //sha1c v0.16b,v2.16b,v20.4s add v20.4s,v16.4s,v4.4s .long 0x5e281885 //sha1su1 v5.16b,v4.16b .long 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 3 .long 0x5e150060 //sha1c v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v5.4s .long 0x5e2818a6 //sha1su1 v6.16b,v5.16b .long 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 4 .long 0x5e140040 //sha1c v0.16b,v2.16b,v20.4s add v20.4s,v17.4s,v6.4s .long 0x5e2818c7 //sha1su1 v7.16b,v6.16b .long 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 5 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v7.4s .long 0x5e2818e4 //sha1su1 v4.16b,v7.16b .long 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 6 .long 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v17.4s,v4.4s .long 0x5e281885 //sha1su1 v5.16b,v4.16b .long 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 7 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v17.4s,v5.4s .long 0x5e2818a6 //sha1su1 v6.16b,v5.16b .long 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 8 .long 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v6.4s .long 0x5e2818c7 //sha1su1 v7.16b,v6.16b .long 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 9 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v18.4s,v7.4s .long 0x5e2818e4 //sha1su1 v4.16b,v7.16b .long 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 10 .long 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v4.4s .long 0x5e281885 //sha1su1 v5.16b,v4.16b .long 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 11 .long 0x5e152060 //sha1m v0.16b,v3.16b,v21.4s add v21.4s,v18.4s,v5.4s .long 0x5e2818a6 //sha1su1 v6.16b,v5.16b .long 0x5e053087 //sha1su0 
v7.16b,v4.16b,v5.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 12 .long 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v18.4s,v6.4s .long 0x5e2818c7 //sha1su1 v7.16b,v6.16b .long 0x5e0630a4 //sha1su0 v4.16b,v5.16b,v6.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 13 .long 0x5e152060 //sha1m v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v7.4s .long 0x5e2818e4 //sha1su1 v4.16b,v7.16b .long 0x5e0730c5 //sha1su0 v5.16b,v6.16b,v7.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 14 .long 0x5e142040 //sha1m v0.16b,v2.16b,v20.4s add v20.4s,v19.4s,v4.4s .long 0x5e281885 //sha1su1 v5.16b,v4.16b .long 0x5e0430e6 //sha1su0 v6.16b,v7.16b,v4.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 15 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v5.4s .long 0x5e2818a6 //sha1su1 v6.16b,v5.16b .long 0x5e053087 //sha1su0 v7.16b,v4.16b,v5.16b .long 0x5e280803 //sha1h v3.16b,v0.16b // 16 .long 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s add v20.4s,v19.4s,v6.4s .long 0x5e2818c7 //sha1su1 v7.16b,v6.16b .long 0x5e280802 //sha1h v2.16b,v0.16b // 17 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v21.4s,v19.4s,v7.4s .long 0x5e280803 //sha1h v3.16b,v0.16b // 18 .long 0x5e141040 //sha1p v0.16b,v2.16b,v20.4s .long 0x5e280802 //sha1h v2.16b,v0.16b // 19 .long 0x5e151060 //sha1p v0.16b,v3.16b,v21.4s add v1.4s,v1.4s,v2.4s add v0.4s,v0.4s,v22.4s cbnz x2,Loop_hw st1 {v0.4s},[x0],#16 st1 {v1.s}[0],[x0] ldr x29,[sp],#16 ret .section __TEXT,__const .align 6 Lconst: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 //K_00_19 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 //K_20_39 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc //K_40_59 .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 //K_60_79 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
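An illustrative aside (not part of the generated file above): the scalar loop in _sha1_block_data_order_nohw unrolls the 80 SHA-1 rounds, switching the boolean function and the K constant every 20 rounds (the movz/movk pairs build 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc and 0xca62c1d6). A small Python sketch of that per-round selection, matching the bic/and/orr (Ch), eor/eor (Parity) and orr/and (Maj) patterns visible above:

    def sha1_f_k(t: int, b: int, c: int, d: int):
        # Returns (f(b, c, d), K) for round t, 0 <= t < 80; inputs are 32-bit words.
        if t < 20:
            return (b & c) | (~b & d & 0xFFFFFFFF), 0x5A827999  # Ch
        if t < 40:
            return b ^ c ^ d, 0x6ED9EBA1                        # Parity
        if t < 60:
            return (b & c) | (b & d) | (c & d), 0x8F1BBCDC      # Maj
        return b ^ c ^ d, 0xCA62C1D6                            # Parity

The assembly interleaves this selection with the message-schedule XOR/rotate and the e += rol(a,5) updates instead of routing it through a helper.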
marvin-hansen/iggy-streaming-system
80,042
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/aesv8-gcm-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> #if __ARM_MAX_ARCH__ >= 8 .text .globl _aes_gcm_enc_kernel .private_extern _aes_gcm_enc_kernel .align 4 _aes_gcm_enc_kernel: #ifdef BORINGSSL_DISPATCH_TEST adrp x9,_BORINGSSL_function_hit@PAGE add x9, x9, _BORINGSSL_function_hit@PAGEOFF mov w10, #1 strb w10, [x9,#2] // kFlag_aes_gcm_enc_kernel #endif AARCH64_SIGN_LINK_REGISTER stp x29, x30, [sp, #-128]! mov x29, sp stp x19, x20, [sp, #16] mov x16, x4 mov x8, x5 stp x21, x22, [sp, #32] stp x23, x24, [sp, #48] stp d8, d9, [sp, #64] stp d10, d11, [sp, #80] stp d12, d13, [sp, #96] stp d14, d15, [sp, #112] ldr w17, [x8, #240] add x19, x8, x17, lsl #4 // borrow input_l1 for last key ldp x13, x14, [x19] // load round N keys ldr q31, [x19, #-16] // load round N-1 keys add x4, x0, x1, lsr #3 // end_input_ptr lsr x5, x1, #3 // byte_len mov x15, x5 ldp x10, x11, [x16] // ctr96_b64, ctr96_t32 ld1 { v0.16b}, [x16] // special case vector load initial counter so we can start first AES block as quickly as possible sub x5, x5, #1 // byte_len - 1 ldr q18, [x8, #0] // load rk0 and x5, x5, #0xffffffffffffffc0 // number of bytes to be processed in main loop (at least 1 byte must be handled by tail) ldr q25, [x8, #112] // load rk7 add x5, x5, x0 lsr x12, x11, #32 fmov d2, x10 // CTR block 2 orr w11, w11, w11 rev w12, w12 // rev_ctr32 fmov d1, x10 // CTR block 1 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 0 - round 0 add w12, w12, #1 // increment rev_ctr32 rev w9, w12 // CTR block 1 fmov d3, x10 // CTR block 3 orr x9, x11, x9, lsl #32 // CTR block 1 add w12, w12, #1 // CTR block 1 ldr q19, [x8, #16] // load rk1 fmov v1.d[1], x9 // CTR block 1 rev w9, w12 // CTR block 2 add w12, w12, #1 // CTR block 2 orr x9, x11, x9, lsl #32 // CTR block 2 ldr q20, [x8, #32] // load rk2 fmov v2.d[1], x9 // CTR block 2 rev w9, w12 // CTR block 3 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 0 - round 1 orr x9, x11, x9, lsl #32 // CTR block 3 fmov v3.d[1], x9 // CTR block 3 aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 1 - round 0 ldr q21, [x8, #48] // load rk3 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 0 - round 2 ldr q24, [x8, #96] // load rk6 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 2 - round 0 ldr q23, [x8, #80] // load rk5 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 1 - round 1 ldr q14, [x6, #48] // load h3l | h3h aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 3 - round 0 aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 2 - round 1 ldr q22, [x8, #64] // load rk4 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 1 - round 2 ldr q13, [x6, #32] // load h2l | h2h aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 3 - round 1 ldr q30, [x8, #192] // load rk12 aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 2 - round 2 ldr q15, [x6, #80] // load h4l | h4h aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 1 - round 3 ldr q29, [x8, #176] // load rk11 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 3 - round 2 ldr q26, [x8, #128] // load rk8 aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 2 - round 3 add w12, w12, #1 // CTR block 3 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 0 - round 3 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 3 - round 3 ld1 { v11.16b}, [x3] ext v11.16b, v11.16b, v11.16b, #8 rev64 
v11.16b, v11.16b aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 2 - round 4 aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 0 - round 4 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 1 - round 4 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 3 - round 4 cmp x17, #12 // setup flags for AES-128/192/256 check aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 0 - round 5 aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 1 - round 5 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 3 - round 5 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 2 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 1 - round 6 trn2 v17.2d, v14.2d, v15.2d // h4l | h3l aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 3 - round 6 ldr q27, [x8, #144] // load rk9 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 0 - round 6 ldr q12, [x6] // load h1l | h1h aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 2 - round 6 ldr q28, [x8, #160] // load rk10 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 1 - round 7 trn1 v9.2d, v14.2d, v15.2d // h4h | h3h aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 0 - round 7 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 2 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 3 - round 7 trn2 v16.2d, v12.2d, v13.2d // h2l | h1l aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 1 - round 8 aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 2 - round 8 aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 3 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 0 - round 8 b.lt Lenc_finish_first_blocks // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 1 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 2 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 3 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 0 - round 9 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 1 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 2 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 3 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 0 - round 10 b.eq Lenc_finish_first_blocks // branch if AES-192 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 1 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 2 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 0 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 3 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 1 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 2 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 0 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 3 - round 12 Lenc_finish_first_blocks: cmp x0, x5 // check if we have <= 4 blocks eor v17.16b, v17.16b, v9.16b // h4k | h3k aese v2.16b, v31.16b // AES block 2 - round N-1 trn1 v8.2d, v12.2d, v13.2d // h2h | h1h aese v1.16b, v31.16b // AES block 1 - round N-1 aese v0.16b, v31.16b // AES block 0 - round N-1 aese v3.16b, v31.16b // AES block 3 - round N-1 eor v16.16b, v16.16b, v8.16b // h2k | h1k b.ge Lenc_tail // handle tail ldp x19, x20, [x0, #16] // AES block 1 - load plaintext rev w9, w12 // CTR block 4 ldp x6, x7, [x0, #0] // AES block 0 - load plaintext ldp x23, x24, [x0, #48] // AES block 3 - load plaintext ldp x21, x22, [x0, #32] // AES block 2 - load plaintext add x0, x0, #64 // AES input_ptr update eor x19, x19, x13 // AES block 1 - 
round N low eor x20, x20, x14 // AES block 1 - round N high fmov d5, x19 // AES block 1 - mov low eor x6, x6, x13 // AES block 0 - round N low eor x7, x7, x14 // AES block 0 - round N high eor x24, x24, x14 // AES block 3 - round N high fmov d4, x6 // AES block 0 - mov low cmp x0, x5 // check if we have <= 8 blocks fmov v4.d[1], x7 // AES block 0 - mov high eor x23, x23, x13 // AES block 3 - round N low eor x21, x21, x13 // AES block 2 - round N low fmov v5.d[1], x20 // AES block 1 - mov high fmov d6, x21 // AES block 2 - mov low add w12, w12, #1 // CTR block 4 orr x9, x11, x9, lsl #32 // CTR block 4 fmov d7, x23 // AES block 3 - mov low eor x22, x22, x14 // AES block 2 - round N high fmov v6.d[1], x22 // AES block 2 - mov high eor v4.16b, v4.16b, v0.16b // AES block 0 - result fmov d0, x10 // CTR block 4 fmov v0.d[1], x9 // CTR block 4 rev w9, w12 // CTR block 5 add w12, w12, #1 // CTR block 5 eor v5.16b, v5.16b, v1.16b // AES block 1 - result fmov d1, x10 // CTR block 5 orr x9, x11, x9, lsl #32 // CTR block 5 fmov v1.d[1], x9 // CTR block 5 rev w9, w12 // CTR block 6 st1 { v4.16b}, [x2], #16 // AES block 0 - store result fmov v7.d[1], x24 // AES block 3 - mov high orr x9, x11, x9, lsl #32 // CTR block 6 eor v6.16b, v6.16b, v2.16b // AES block 2 - result st1 { v5.16b}, [x2], #16 // AES block 1 - store result add w12, w12, #1 // CTR block 6 fmov d2, x10 // CTR block 6 fmov v2.d[1], x9 // CTR block 6 st1 { v6.16b}, [x2], #16 // AES block 2 - store result rev w9, w12 // CTR block 7 orr x9, x11, x9, lsl #32 // CTR block 7 eor v7.16b, v7.16b, v3.16b // AES block 3 - result st1 { v7.16b}, [x2], #16 // AES block 3 - store result b.ge Lenc_prepretail // do prepretail Lenc_main_loop: // main loop start aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 rev64 v4.16b, v4.16b // GHASH block 4k (only t0 is free) aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d3, x10 // CTR block 4k+3 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 fmov v3.d[1], x9 // CTR block 4k+3 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 ldp x23, x24, [x0, #48] // AES block 4k+7 - load plaintext aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 ldp x21, x22, [x0, #32] // AES block 4k+6 - load plaintext aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 eor v4.16b, v4.16b, v11.16b // PRE 1 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 eor x23, x23, x13 // AES block 4k+7 - round N low aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 mov d10, v17.d[1] // GHASH block 4k - mid pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high eor x22, x22, x14 // AES block 4k+6 - round N high mov d8, v4.d[1] // GHASH block 4k - mid aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 rev64 v5.16b, v5.16b // GHASH block 4k+1 (t0 and t1 free) aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 rev64 v7.16b, v7.16b // GHASH block 4k+3 (t0, t1, t2 and t3 free) pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high pmull v10.1q, 
v8.1d, v10.1d // GHASH block 4k - mid rev64 v6.16b, v6.16b // GHASH block 4k+2 (t0, t1, and t2 free) pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 pmull v6.1q, v7.1d, v12.1d // GHASH block 4k+3 - low eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 ldp x19, x20, [x0, #16] // AES block 4k+5 - load plaintext aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 mov d4, v7.d[1] // GHASH block 4k+3 - mid aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high eor v4.8b, v4.8b, v7.8b // GHASH block 4k+3 - mid aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor x19, x19, x13 // AES block 4k+5 - round N low aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 eor x21, x21, x13 // AES block 4k+6 - round N low aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 movi v8.8b, #0xc2 pmull v4.1q, v4.1d, v16.1d // GHASH block 4k+3 - mid eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high cmp x17, #12 // setup flags for AES-128/192/256 check fmov d5, x19 // AES block 4k+5 - mov low ldp x6, x7, [x0, #0] // AES block 4k+4 - load plaintext b.lt Lenc_main_loop_continue // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES 
block 4k+5 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 b.eq Lenc_main_loop_continue // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 Lenc_main_loop_continue: shl d8, d8, #56 // mod_constant eor v11.16b, v11.16b, v6.16b // GHASH block 4k+3 - low eor v10.16b, v10.16b, v4.16b // GHASH block 4k+3 - mid add w12, w12, #1 // CTR block 4k+3 eor v4.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up add x0, x0, #64 // AES input_ptr update pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid rev w9, w12 // CTR block 4k+8 ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor x6, x6, x13 // AES block 4k+4 - round N low eor v10.16b, v10.16b, v4.16b // MODULO - karatsuba tidy up eor x7, x7, x14 // AES block 4k+4 - round N high fmov d4, x6 // AES block 4k+4 - mov low orr x9, x11, x9, lsl #32 // CTR block 4k+8 eor v7.16b, v9.16b, v7.16b // MODULO - fold into mid eor x20, x20, x14 // AES block 4k+5 - round N high eor x24, x24, x14 // AES block 4k+7 - round N high add w12, w12, #1 // CTR block 4k+8 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 fmov v4.d[1], x7 // AES block 4k+4 - mov high eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid fmov d7, x23 // AES block 4k+7 - mov low aese v1.16b, v31.16b // AES block 4k+5 - round N-1 fmov v5.d[1], x20 // AES block 4k+5 - mov high fmov d6, x21 // AES block 4k+6 - mov low cmp x0, x5 // LOOP CONTROL fmov v6.d[1], x22 // AES block 4k+6 - mov high pmull v9.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor v4.16b, v4.16b, v0.16b // AES block 4k+4 - result fmov d0, x10 // CTR block 4k+8 fmov v0.d[1], x9 // CTR block 4k+8 rev w9, w12 // CTR block 4k+9 add w12, w12, #1 // CTR block 4k+9 eor v5.16b, v5.16b, v1.16b // AES block 4k+5 - result fmov d1, x10 // CTR block 4k+9 orr x9, x11, x9, lsl #32 // CTR block 4k+9 fmov v1.d[1], x9 // CTR block 4k+9 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 rev w9, w12 // CTR block 4k+10 st1 { v4.16b}, [x2], #16 // AES block 4k+4 - store result orr x9, x11, x9, lsl #32 // CTR block 4k+10 eor v11.16b, v11.16b, v9.16b // MODULO - fold into low fmov v7.d[1], x24 // AES block 4k+7 - mov high ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment st1 { v5.16b}, [x2], #16 // AES block 4k+5 - store result add w12, w12, #1 // CTR block 4k+10 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 eor v6.16b, v6.16b, v2.16b // AES block 4k+6 - result fmov d2, x10 // CTR block 4k+10 st1 { v6.16b}, [x2], #16 // AES block 4k+6 - store result fmov v2.d[1], x9 // CTR block 4k+10 rev w9, w12 // CTR block 4k+11 eor v11.16b, v11.16b, v10.16b // MODULO - fold into low orr x9, x11, x9, lsl #32 // CTR block 4k+11 eor v7.16b, v7.16b, v3.16b // AES block 4k+7 - result st1 { v7.16b}, [x2], #16 // AES block 4k+7 - store result b.lt Lenc_main_loop Lenc_prepretail: // PREPRETAIL aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 rev64 v6.16b, v6.16b // GHASH 
block 4k+2 (t0, t1, and t2 free) aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 fmov d3, x10 // CTR block 4k+3 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 rev64 v4.16b, v4.16b // GHASH block 4k (only t0 is free) fmov v3.d[1], x9 // CTR block 4k+3 ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 eor v4.16b, v4.16b, v11.16b // PRE 1 rev64 v5.16b, v5.16b // GHASH block 4k+1 (t0 and t1 free) aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 mov d10, v17.d[1] // GHASH block 4k - mid aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low mov d8, v4.d[1] // GHASH block 4k - mid pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 rev64 v7.16b, v7.16b // GHASH block 4k+3 (t0, t1, t2 and t3 free) aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid add w12, w12, #1 // CTR block 4k+3 pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high mov d4, v7.d[1] // GHASH block 4k+3 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid eor v4.8b, v4.8b, v7.8b // GHASH block 4k+3 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 pmull v4.1q, v4.1d, v16.1d // GHASH block 4k+3 - mid eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 aese v0.16b, v24.16b aesmc v0.16b, 
v0.16b // AES block 4k+4 - round 6 movi v8.8b, #0xc2 aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 shl d8, d8, #56 // mod_constant aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+3 - mid pmull v6.1q, v7.1d, v12.1d // GHASH block 4k+3 - low aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 cmp x17, #12 // setup flags for AES-128/192/256 check aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 eor v11.16b, v11.16b, v6.16b // GHASH block 4k+3 - low aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v10.16b, v10.16b, v9.16b // karatsuba tidy up aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 pmull v4.1q, v9.1d, v8.1d ext v9.16b, v9.16b, v9.16b, #8 eor v10.16b, v10.16b, v11.16b b.lt Lenc_finish_prepretail // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 b.eq Lenc_finish_prepretail // branch if AES-192 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 Lenc_finish_prepretail: eor v10.16b, v10.16b, v4.16b eor v10.16b, v10.16b, v9.16b pmull v4.1q, v10.1d, v8.1d ext v10.16b, v10.16b, v10.16b, #8 aese v1.16b, v31.16b // AES block 4k+5 - round N-1 eor v11.16b, v11.16b, v4.16b aese v3.16b, v31.16b // AES block 4k+7 - round N-1 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 eor v11.16b, v11.16b, v10.16b Lenc_tail: // TAIL ext v8.16b, v11.16b, v11.16b, #8 // prepare final partial tag sub x5, x4, x0 // main_end_input_ptr is number of bytes left to process ldp x6, x7, [x0], #16 // AES block 4k+4 - load plaintext eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high cmp x5, #48 fmov d4, x6 // AES block 4k+4 - mov low fmov v4.d[1], x7 // AES block 4k+4 - mov high eor v5.16b, v4.16b, v0.16b // AES block 4k+4 - result b.gt Lenc_blocks_4_remaining cmp x5, #32 mov v3.16b, v2.16b movi v11.8b, #0 movi v9.8b, #0 sub w12, w12, #1 mov v2.16b, v1.16b movi v10.8b, #0 b.gt Lenc_blocks_3_remaining mov v3.16b, v1.16b sub w12, w12, #1 cmp x5, #16 b.gt Lenc_blocks_2_remaining sub w12, w12, #1 b Lenc_blocks_1_remaining Lenc_blocks_4_remaining: // blocks left = 4 
st1 { v5.16b}, [x2], #16 // AES final-3 block - store result ldp x6, x7, [x0], #16 // AES final-2 block - load input low & high rev64 v4.16b, v5.16b // GHASH final-3 block eor x6, x6, x13 // AES final-2 block - round N low eor v4.16b, v4.16b, v8.16b // feed in partial tag eor x7, x7, x14 // AES final-2 block - round N high mov d22, v4.d[1] // GHASH final-3 block - mid fmov d5, x6 // AES final-2 block - mov low fmov v5.d[1], x7 // AES final-2 block - mov high eor v22.8b, v22.8b, v4.8b // GHASH final-3 block - mid movi v8.8b, #0 // suppress further partial tag feed in mov d10, v17.d[1] // GHASH final-3 block - mid pmull v11.1q, v4.1d, v15.1d // GHASH final-3 block - low pmull2 v9.1q, v4.2d, v15.2d // GHASH final-3 block - high pmull v10.1q, v22.1d, v10.1d // GHASH final-3 block - mid eor v5.16b, v5.16b, v1.16b // AES final-2 block - result Lenc_blocks_3_remaining: // blocks left = 3 st1 { v5.16b}, [x2], #16 // AES final-2 block - store result ldp x6, x7, [x0], #16 // AES final-1 block - load input low & high rev64 v4.16b, v5.16b // GHASH final-2 block eor x6, x6, x13 // AES final-1 block - round N low eor v4.16b, v4.16b, v8.16b // feed in partial tag fmov d5, x6 // AES final-1 block - mov low eor x7, x7, x14 // AES final-1 block - round N high fmov v5.d[1], x7 // AES final-1 block - mov high movi v8.8b, #0 // suppress further partial tag feed in pmull2 v20.1q, v4.2d, v14.2d // GHASH final-2 block - high mov d22, v4.d[1] // GHASH final-2 block - mid pmull v21.1q, v4.1d, v14.1d // GHASH final-2 block - low eor v22.8b, v22.8b, v4.8b // GHASH final-2 block - mid eor v5.16b, v5.16b, v2.16b // AES final-1 block - result eor v9.16b, v9.16b, v20.16b // GHASH final-2 block - high pmull v22.1q, v22.1d, v17.1d // GHASH final-2 block - mid eor v11.16b, v11.16b, v21.16b // GHASH final-2 block - low eor v10.16b, v10.16b, v22.16b // GHASH final-2 block - mid Lenc_blocks_2_remaining: // blocks left = 2 st1 { v5.16b}, [x2], #16 // AES final-1 block - store result rev64 v4.16b, v5.16b // GHASH final-1 block ldp x6, x7, [x0], #16 // AES final block - load input low & high eor v4.16b, v4.16b, v8.16b // feed in partial tag movi v8.8b, #0 // suppress further partial tag feed in eor x6, x6, x13 // AES final block - round N low mov d22, v4.d[1] // GHASH final-1 block - mid pmull2 v20.1q, v4.2d, v13.2d // GHASH final-1 block - high eor x7, x7, x14 // AES final block - round N high eor v22.8b, v22.8b, v4.8b // GHASH final-1 block - mid eor v9.16b, v9.16b, v20.16b // GHASH final-1 block - high ins v22.d[1], v22.d[0] // GHASH final-1 block - mid fmov d5, x6 // AES final block - mov low fmov v5.d[1], x7 // AES final block - mov high pmull2 v22.1q, v22.2d, v16.2d // GHASH final-1 block - mid pmull v21.1q, v4.1d, v13.1d // GHASH final-1 block - low eor v5.16b, v5.16b, v3.16b // AES final block - result eor v10.16b, v10.16b, v22.16b // GHASH final-1 block - mid eor v11.16b, v11.16b, v21.16b // GHASH final-1 block - low Lenc_blocks_1_remaining: // blocks_left = 1 rev64 v4.16b, v5.16b // GHASH final block eor v4.16b, v4.16b, v8.16b // feed in partial tag pmull2 v20.1q, v4.2d, v12.2d // GHASH final block - high mov d8, v4.d[1] // GHASH final block - mid rev w9, w12 pmull v21.1q, v4.1d, v12.1d // GHASH final block - low eor v9.16b, v9.16b, v20.16b // GHASH final block - high eor v8.8b, v8.8b, v4.8b // GHASH final block - mid pmull v8.1q, v8.1d, v16.1d // GHASH final block - mid eor v11.16b, v11.16b, v21.16b // GHASH final block - low eor v10.16b, v10.16b, v8.16b // GHASH final block - mid movi v8.8b, #0xc2 eor v4.16b, v11.16b, 
v9.16b // MODULO - karatsuba tidy up shl d8, d8, #56 // mod_constant eor v10.16b, v10.16b, v4.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid pmull v9.1q, v10.1d, v8.1d // MODULO - mid 64b align with low ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment str w9, [x16, #12] // store the updated counter st1 { v5.16b}, [x2] // store all 16B eor v11.16b, v11.16b, v9.16b // MODULO - fold into low eor v11.16b, v11.16b, v10.16b // MODULO - fold into low ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b mov x0, x15 st1 { v11.16b }, [x3] ldp x19, x20, [sp, #16] ldp x21, x22, [sp, #32] ldp x23, x24, [sp, #48] ldp d8, d9, [sp, #64] ldp d10, d11, [sp, #80] ldp d12, d13, [sp, #96] ldp d14, d15, [sp, #112] ldp x29, x30, [sp], #128 AARCH64_VALIDATE_LINK_REGISTER ret .globl _aes_gcm_dec_kernel .private_extern _aes_gcm_dec_kernel .align 4 _aes_gcm_dec_kernel: AARCH64_SIGN_LINK_REGISTER stp x29, x30, [sp, #-128]! mov x29, sp stp x19, x20, [sp, #16] mov x16, x4 mov x8, x5 stp x21, x22, [sp, #32] stp x23, x24, [sp, #48] stp d8, d9, [sp, #64] stp d10, d11, [sp, #80] stp d12, d13, [sp, #96] stp d14, d15, [sp, #112] ldr w17, [x8, #240] add x19, x8, x17, lsl #4 // borrow input_l1 for last key ldp x13, x14, [x19] // load round N keys ldr q31, [x19, #-16] // load round N-1 keys lsr x5, x1, #3 // byte_len mov x15, x5 ldp x10, x11, [x16] // ctr96_b64, ctr96_t32 ldr q26, [x8, #128] // load rk8 sub x5, x5, #1 // byte_len - 1 ldr q25, [x8, #112] // load rk7 and x5, x5, #0xffffffffffffffc0 // number of bytes to be processed in main loop (at least 1 byte must be handled by tail) add x4, x0, x1, lsr #3 // end_input_ptr ldr q24, [x8, #96] // load rk6 lsr x12, x11, #32 ldr q23, [x8, #80] // load rk5 orr w11, w11, w11 ldr q21, [x8, #48] // load rk3 add x5, x5, x0 rev w12, w12 // rev_ctr32 add w12, w12, #1 // increment rev_ctr32 fmov d3, x10 // CTR block 3 rev w9, w12 // CTR block 1 add w12, w12, #1 // CTR block 1 fmov d1, x10 // CTR block 1 orr x9, x11, x9, lsl #32 // CTR block 1 ld1 { v0.16b}, [x16] // special case vector load initial counter so we can start first AES block as quickly as possible fmov v1.d[1], x9 // CTR block 1 rev w9, w12 // CTR block 2 add w12, w12, #1 // CTR block 2 fmov d2, x10 // CTR block 2 orr x9, x11, x9, lsl #32 // CTR block 2 fmov v2.d[1], x9 // CTR block 2 rev w9, w12 // CTR block 3 orr x9, x11, x9, lsl #32 // CTR block 3 ldr q18, [x8, #0] // load rk0 fmov v3.d[1], x9 // CTR block 3 add w12, w12, #1 // CTR block 3 ldr q22, [x8, #64] // load rk4 ldr q19, [x8, #16] // load rk1 aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 0 - round 0 ldr q14, [x6, #48] // load h3l | h3h aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 3 - round 0 ldr q15, [x6, #80] // load h4l | h4h aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 1 - round 0 ldr q13, [x6, #32] // load h2l | h2h aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 2 - round 0 ldr q20, [x8, #32] // load rk2 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 0 - round 1 aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 1 - round 1 ld1 { v11.16b}, [x3] ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 2 - round 1 ldr q27, [x8, #144] // load rk9 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 3 - round 1 ldr q30, 
[x8, #192] // load rk12 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 0 - round 2 ldr q12, [x6] // load h1l | h1h aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 2 - round 2 ldr q28, [x8, #160] // load rk10 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 3 - round 2 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 0 - round 3 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 1 - round 2 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 3 - round 3 aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 0 - round 4 aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 2 - round 3 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 1 - round 3 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 3 - round 4 aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 2 - round 4 aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 1 - round 4 aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 3 - round 5 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 0 - round 5 aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 1 - round 5 aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 2 - round 5 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 0 - round 6 aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 3 - round 6 cmp x17, #12 // setup flags for AES-128/192/256 check aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 1 - round 6 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 2 - round 6 aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 0 - round 7 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 1 - round 7 aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 3 - round 7 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 0 - round 8 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 2 - round 7 aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 3 - round 8 aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 1 - round 8 ldr q29, [x8, #176] // load rk11 aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 2 - round 8 b.lt Ldec_finish_first_blocks // branch if AES-128 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 0 - round 9 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 1 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 3 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 2 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 0 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 1 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 3 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 2 - round 10 b.eq Ldec_finish_first_blocks // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 0 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 3 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 1 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 2 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 1 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 0 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 2 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 3 - round 12 Ldec_finish_first_blocks: cmp x0, x5 // check if we have <= 4 blocks trn1 v9.2d, v14.2d, v15.2d // h4h | h3h trn2 v17.2d, v14.2d, v15.2d // h4l | h3l trn1 v8.2d, v12.2d, v13.2d // h2h | h1h trn2 v16.2d, v12.2d, v13.2d // h2l | h1l eor v17.16b, v17.16b, v9.16b // h4k | h3k aese v1.16b, v31.16b // AES block 1 - round 
N-1 aese v2.16b, v31.16b // AES block 2 - round N-1 eor v16.16b, v16.16b, v8.16b // h2k | h1k aese v3.16b, v31.16b // AES block 3 - round N-1 aese v0.16b, v31.16b // AES block 0 - round N-1 b.ge Ldec_tail // handle tail ldr q4, [x0, #0] // AES block 0 - load ciphertext ldr q5, [x0, #16] // AES block 1 - load ciphertext rev w9, w12 // CTR block 4 eor v0.16b, v4.16b, v0.16b // AES block 0 - result eor v1.16b, v5.16b, v1.16b // AES block 1 - result rev64 v5.16b, v5.16b // GHASH block 1 ldr q7, [x0, #48] // AES block 3 - load ciphertext mov x7, v0.d[1] // AES block 0 - mov high mov x6, v0.d[0] // AES block 0 - mov low rev64 v4.16b, v4.16b // GHASH block 0 add w12, w12, #1 // CTR block 4 fmov d0, x10 // CTR block 4 orr x9, x11, x9, lsl #32 // CTR block 4 fmov v0.d[1], x9 // CTR block 4 rev w9, w12 // CTR block 5 add w12, w12, #1 // CTR block 5 mov x19, v1.d[0] // AES block 1 - mov low orr x9, x11, x9, lsl #32 // CTR block 5 mov x20, v1.d[1] // AES block 1 - mov high eor x7, x7, x14 // AES block 0 - round N high eor x6, x6, x13 // AES block 0 - round N low stp x6, x7, [x2], #16 // AES block 0 - store result fmov d1, x10 // CTR block 5 ldr q6, [x0, #32] // AES block 2 - load ciphertext add x0, x0, #64 // AES input_ptr update fmov v1.d[1], x9 // CTR block 5 rev w9, w12 // CTR block 6 add w12, w12, #1 // CTR block 6 eor x19, x19, x13 // AES block 1 - round N low orr x9, x11, x9, lsl #32 // CTR block 6 eor x20, x20, x14 // AES block 1 - round N high stp x19, x20, [x2], #16 // AES block 1 - store result eor v2.16b, v6.16b, v2.16b // AES block 2 - result cmp x0, x5 // check if we have <= 8 blocks b.ge Ldec_prepretail // do prepretail Ldec_main_loop: // main loop start mov x21, v2.d[0] // AES block 4k+2 - mov low ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 eor v3.16b, v7.16b, v3.16b // AES block 4k+3 - result aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 mov x22, v2.d[1] // AES block 4k+2 - mov high aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d2, x10 // CTR block 4k+6 fmov v2.d[1], x9 // CTR block 4k+6 eor v4.16b, v4.16b, v11.16b // PRE 1 rev w9, w12 // CTR block 4k+7 aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 mov x24, v3.d[1] // AES block 4k+3 - mov high aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 mov x23, v3.d[0] // AES block 4k+3 - mov low pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high mov d8, v4.d[1] // GHASH block 4k - mid fmov d3, x10 // CTR block 4k+7 aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 orr x9, x11, x9, lsl #32 // CTR block 4k+7 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 fmov v3.d[1], x9 // CTR block 4k+7 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 eor x22, x22, x14 // AES block 4k+2 - round N high aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 mov d10, v17.d[1] // GHASH block 4k - mid aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 rev64 v6.16b, v6.16b // GHASH block 4k+2 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 eor x21, x21, x13 // AES block 4k+2 - round N low aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 stp x21, x22, [x2], #16 // AES block 4k+2 - store result pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high aese v2.16b, 
v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 rev64 v7.16b, v7.16b // GHASH block 4k+3 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid eor x23, x23, x13 // AES block 4k+3 - round N low pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low eor x24, x24, x14 // AES block 4k+3 - round N high eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 add w12, w12, #1 // CTR block 4k+7 aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid rev w9, w12 // CTR block 4k+8 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 add w12, w12, #1 // CTR block 4k+8 aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high mov d6, v7.d[1] // GHASH block 4k+3 - mid aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 pmull v4.1q, v7.1d, v12.1d // GHASH block 4k+3 - low orr x9, x11, x9, lsl #32 // CTR block 4k+8 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high cmp x17, #12 // setup flags for AES-128/192/256 check eor v6.8b, v6.8b, v7.8b // GHASH block 4k+3 - mid aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 6 eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high pmull v6.1q, v6.1d, v16.1d // GHASH block 4k+3 - mid movi v8.8b, #0xc2 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v11.16b, v11.16b, v4.16b // GHASH block 4k+3 - low aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 shl d8, d8, #56 // mod_constant aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 eor v10.16b, v10.16b, v6.16b // GHASH block 4k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 b.lt Ldec_main_loop_continue // branch if AES-128 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v1.16b, 
v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 b.eq Ldec_main_loop_continue // branch if AES-192 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 Ldec_main_loop_continue: pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up ldr q4, [x0, #0] // AES block 4k+4 - load ciphertext aese v0.16b, v31.16b // AES block 4k+4 - round N-1 ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up ldr q5, [x0, #16] // AES block 4k+5 - load ciphertext eor v0.16b, v4.16b, v0.16b // AES block 4k+4 - result stp x23, x24, [x2], #16 // AES block 4k+3 - store result eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid ldr q7, [x0, #48] // AES block 4k+7 - load ciphertext ldr q6, [x0, #32] // AES block 4k+6 - load ciphertext mov x7, v0.d[1] // AES block 4k+4 - mov high eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid aese v1.16b, v31.16b // AES block 4k+5 - round N-1 add x0, x0, #64 // AES input_ptr update mov x6, v0.d[0] // AES block 4k+4 - mov low fmov d0, x10 // CTR block 4k+8 fmov v0.d[1], x9 // CTR block 4k+8 pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor v1.16b, v5.16b, v1.16b // AES block 4k+5 - result rev w9, w12 // CTR block 4k+9 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 orr x9, x11, x9, lsl #32 // CTR block 4k+9 cmp x0, x5 // LOOP CONTROL add w12, w12, #1 // CTR block 4k+9 eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high mov x20, v1.d[1] // AES block 4k+5 - mov high eor v2.16b, v6.16b, v2.16b // AES block 4k+6 - result eor v11.16b, v11.16b, v8.16b // MODULO - fold into low mov x19, v1.d[0] // AES block 4k+5 - mov low fmov d1, x10 // CTR block 4k+9 ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment fmov v1.d[1], x9 // CTR block 4k+9 rev w9, w12 // CTR block 4k+10 add w12, w12, #1 // CTR block 4k+10 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 orr x9, x11, x9, lsl #32 // CTR block 4k+10 rev64 v5.16b, v5.16b // GHASH block 4k+5 eor x20, x20, x14 // AES block 4k+5 - round N high stp x6, x7, [x2], #16 // AES block 4k+4 - store result eor x19, x19, x13 // AES block 4k+5 - round N low stp x19, x20, [x2], #16 // AES block 4k+5 - store result rev64 v4.16b, v4.16b // GHASH block 4k+4 eor v11.16b, v11.16b, v10.16b // MODULO - fold into low b.lt Ldec_main_loop Ldec_prepretail: // PREPRETAIL ext v11.16b, v11.16b, v11.16b, #8 // PRE 0 mov x21, v2.d[0] // AES block 4k+2 - mov low eor v3.16b, v7.16b, v3.16b // AES block 4k+3 - result aese v0.16b, v18.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 0 mov x22, v2.d[1] // 
AES block 4k+2 - mov high aese v1.16b, v18.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 0 fmov d2, x10 // CTR block 4k+6 fmov v2.d[1], x9 // CTR block 4k+6 rev w9, w12 // CTR block 4k+7 eor v4.16b, v4.16b, v11.16b // PRE 1 rev64 v6.16b, v6.16b // GHASH block 4k+2 orr x9, x11, x9, lsl #32 // CTR block 4k+7 mov x23, v3.d[0] // AES block 4k+3 - mov low aese v1.16b, v19.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 1 mov x24, v3.d[1] // AES block 4k+3 - mov high pmull v11.1q, v4.1d, v15.1d // GHASH block 4k - low mov d8, v4.d[1] // GHASH block 4k - mid fmov d3, x10 // CTR block 4k+7 pmull2 v9.1q, v4.2d, v15.2d // GHASH block 4k - high fmov v3.d[1], x9 // CTR block 4k+7 aese v2.16b, v18.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 0 mov d10, v17.d[1] // GHASH block 4k - mid aese v0.16b, v19.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 1 eor v8.8b, v8.8b, v4.8b // GHASH block 4k - mid pmull2 v4.1q, v5.2d, v14.2d // GHASH block 4k+1 - high aese v2.16b, v19.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 1 rev64 v7.16b, v7.16b // GHASH block 4k+3 aese v3.16b, v18.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 0 pmull v10.1q, v8.1d, v10.1d // GHASH block 4k - mid eor v9.16b, v9.16b, v4.16b // GHASH block 4k+1 - high pmull v8.1q, v5.1d, v14.1d // GHASH block 4k+1 - low aese v3.16b, v19.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 1 mov d4, v5.d[1] // GHASH block 4k+1 - mid aese v0.16b, v20.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 2 aese v1.16b, v20.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 2 eor v11.16b, v11.16b, v8.16b // GHASH block 4k+1 - low aese v2.16b, v20.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 2 aese v0.16b, v21.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 3 mov d8, v6.d[1] // GHASH block 4k+2 - mid aese v3.16b, v20.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 2 eor v4.8b, v4.8b, v5.8b // GHASH block 4k+1 - mid pmull v5.1q, v6.1d, v13.1d // GHASH block 4k+2 - low aese v0.16b, v22.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 4 aese v3.16b, v21.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 3 eor v8.8b, v8.8b, v6.8b // GHASH block 4k+2 - mid pmull v4.1q, v4.1d, v17.1d // GHASH block 4k+1 - mid aese v0.16b, v23.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 5 eor v11.16b, v11.16b, v5.16b // GHASH block 4k+2 - low aese v3.16b, v22.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 4 pmull2 v5.1q, v7.2d, v12.2d // GHASH block 4k+3 - high eor v10.16b, v10.16b, v4.16b // GHASH block 4k+1 - mid pmull2 v4.1q, v6.2d, v13.2d // GHASH block 4k+2 - high aese v3.16b, v23.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 5 ins v8.d[1], v8.d[0] // GHASH block 4k+2 - mid aese v2.16b, v21.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 3 aese v1.16b, v21.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 3 eor v9.16b, v9.16b, v4.16b // GHASH block 4k+2 - high pmull v4.1q, v7.1d, v12.1d // GHASH block 4k+3 - low aese v2.16b, v22.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 4 mov d6, v7.d[1] // GHASH block 4k+3 - mid aese v1.16b, v22.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 4 pmull2 v8.1q, v8.2d, v16.2d // GHASH block 4k+2 - mid aese v2.16b, v23.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 5 eor v6.8b, v6.8b, v7.8b // GHASH block 4k+3 - mid aese v1.16b, v23.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 5 aese v3.16b, v24.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 6 eor v10.16b, v10.16b, v8.16b // GHASH block 4k+2 - mid aese v2.16b, v24.16b aesmc v2.16b, v2.16b // AES block 4k+6 - 
round 6 aese v0.16b, v24.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 6 movi v8.8b, #0xc2 aese v1.16b, v24.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 6 eor v11.16b, v11.16b, v4.16b // GHASH block 4k+3 - low pmull v6.1q, v6.1d, v16.1d // GHASH block 4k+3 - mid aese v3.16b, v25.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 7 cmp x17, #12 // setup flags for AES-128/192/256 check eor v9.16b, v9.16b, v5.16b // GHASH block 4k+3 - high aese v1.16b, v25.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 7 aese v0.16b, v25.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 7 eor v10.16b, v10.16b, v6.16b // GHASH block 4k+3 - mid aese v3.16b, v26.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 8 aese v2.16b, v25.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 7 eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up aese v1.16b, v26.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 8 aese v0.16b, v26.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 8 shl d8, d8, #56 // mod_constant aese v2.16b, v26.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 8 b.lt Ldec_finish_prepretail // branch if AES-128 aese v1.16b, v27.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 9 aese v2.16b, v27.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 9 aese v3.16b, v27.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 9 aese v0.16b, v27.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 9 aese v2.16b, v28.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 10 aese v3.16b, v28.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 10 aese v0.16b, v28.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 10 aese v1.16b, v28.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 10 b.eq Ldec_finish_prepretail // branch if AES-192 aese v2.16b, v29.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 11 aese v0.16b, v29.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 11 aese v1.16b, v29.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 11 aese v2.16b, v30.16b aesmc v2.16b, v2.16b // AES block 4k+6 - round 12 aese v3.16b, v29.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 11 aese v1.16b, v30.16b aesmc v1.16b, v1.16b // AES block 4k+5 - round 12 aese v0.16b, v30.16b aesmc v0.16b, v0.16b // AES block 4k+4 - round 12 aese v3.16b, v30.16b aesmc v3.16b, v3.16b // AES block 4k+7 - round 12 Ldec_finish_prepretail: eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor x22, x22, x14 // AES block 4k+2 - round N high eor x23, x23, x13 // AES block 4k+3 - round N low eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid add w12, w12, #1 // CTR block 4k+7 eor x21, x21, x13 // AES block 4k+2 - round N low pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low eor x24, x24, x14 // AES block 4k+3 - round N high stp x21, x22, [x2], #16 // AES block 4k+2 - store result ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment stp x23, x24, [x2], #16 // AES block 4k+3 - store result eor v11.16b, v11.16b, v8.16b // MODULO - fold into low aese v1.16b, v31.16b // AES block 4k+5 - round N-1 aese v0.16b, v31.16b // AES block 4k+4 - round N-1 aese v3.16b, v31.16b // AES block 4k+7 - round N-1 aese v2.16b, v31.16b // AES block 4k+6 - round N-1 eor v11.16b, v11.16b, v10.16b // MODULO - fold into low Ldec_tail: // TAIL sub x5, x4, x0 // main_end_input_ptr is number of bytes left to process ld1 { v5.16b}, [x0], #16 // AES 
block 4k+4 - load ciphertext eor v0.16b, v5.16b, v0.16b // AES block 4k+4 - result mov x6, v0.d[0] // AES block 4k+4 - mov low mov x7, v0.d[1] // AES block 4k+4 - mov high ext v8.16b, v11.16b, v11.16b, #8 // prepare final partial tag cmp x5, #48 eor x6, x6, x13 // AES block 4k+4 - round N low eor x7, x7, x14 // AES block 4k+4 - round N high b.gt Ldec_blocks_4_remaining sub w12, w12, #1 mov v3.16b, v2.16b movi v10.8b, #0 movi v11.8b, #0 cmp x5, #32 movi v9.8b, #0 mov v2.16b, v1.16b b.gt Ldec_blocks_3_remaining sub w12, w12, #1 mov v3.16b, v1.16b cmp x5, #16 b.gt Ldec_blocks_2_remaining sub w12, w12, #1 b Ldec_blocks_1_remaining Ldec_blocks_4_remaining: // blocks left = 4 rev64 v4.16b, v5.16b // GHASH final-3 block ld1 { v5.16b}, [x0], #16 // AES final-2 block - load ciphertext stp x6, x7, [x2], #16 // AES final-3 block - store result mov d10, v17.d[1] // GHASH final-3 block - mid eor v4.16b, v4.16b, v8.16b // feed in partial tag eor v0.16b, v5.16b, v1.16b // AES final-2 block - result mov d22, v4.d[1] // GHASH final-3 block - mid mov x6, v0.d[0] // AES final-2 block - mov low mov x7, v0.d[1] // AES final-2 block - mov high eor v22.8b, v22.8b, v4.8b // GHASH final-3 block - mid movi v8.8b, #0 // suppress further partial tag feed in pmull2 v9.1q, v4.2d, v15.2d // GHASH final-3 block - high pmull v10.1q, v22.1d, v10.1d // GHASH final-3 block - mid eor x6, x6, x13 // AES final-2 block - round N low pmull v11.1q, v4.1d, v15.1d // GHASH final-3 block - low eor x7, x7, x14 // AES final-2 block - round N high Ldec_blocks_3_remaining: // blocks left = 3 rev64 v4.16b, v5.16b // GHASH final-2 block ld1 { v5.16b}, [x0], #16 // AES final-1 block - load ciphertext eor v4.16b, v4.16b, v8.16b // feed in partial tag stp x6, x7, [x2], #16 // AES final-2 block - store result eor v0.16b, v5.16b, v2.16b // AES final-1 block - result mov d22, v4.d[1] // GHASH final-2 block - mid pmull v21.1q, v4.1d, v14.1d // GHASH final-2 block - low pmull2 v20.1q, v4.2d, v14.2d // GHASH final-2 block - high eor v22.8b, v22.8b, v4.8b // GHASH final-2 block - mid mov x6, v0.d[0] // AES final-1 block - mov low mov x7, v0.d[1] // AES final-1 block - mov high eor v11.16b, v11.16b, v21.16b // GHASH final-2 block - low movi v8.8b, #0 // suppress further partial tag feed in pmull v22.1q, v22.1d, v17.1d // GHASH final-2 block - mid eor v9.16b, v9.16b, v20.16b // GHASH final-2 block - high eor x6, x6, x13 // AES final-1 block - round N low eor v10.16b, v10.16b, v22.16b // GHASH final-2 block - mid eor x7, x7, x14 // AES final-1 block - round N high Ldec_blocks_2_remaining: // blocks left = 2 stp x6, x7, [x2], #16 // AES final-1 block - store result rev64 v4.16b, v5.16b // GHASH final-1 block ld1 { v5.16b}, [x0], #16 // AES final block - load ciphertext eor v4.16b, v4.16b, v8.16b // feed in partial tag movi v8.8b, #0 // suppress further partial tag feed in mov d22, v4.d[1] // GHASH final-1 block - mid eor v0.16b, v5.16b, v3.16b // AES final block - result pmull2 v20.1q, v4.2d, v13.2d // GHASH final-1 block - high eor v22.8b, v22.8b, v4.8b // GHASH final-1 block - mid pmull v21.1q, v4.1d, v13.1d // GHASH final-1 block - low mov x6, v0.d[0] // AES final block - mov low ins v22.d[1], v22.d[0] // GHASH final-1 block - mid mov x7, v0.d[1] // AES final block - mov high pmull2 v22.1q, v22.2d, v16.2d // GHASH final-1 block - mid eor x6, x6, x13 // AES final block - round N low eor v11.16b, v11.16b, v21.16b // GHASH final-1 block - low eor v9.16b, v9.16b, v20.16b // GHASH final-1 block - high eor v10.16b, v10.16b, v22.16b // GHASH final-1 block - 
mid eor x7, x7, x14 // AES final block - round N high Ldec_blocks_1_remaining: // blocks_left = 1 rev w9, w12 rev64 v4.16b, v5.16b // GHASH final block eor v4.16b, v4.16b, v8.16b // feed in partial tag pmull v21.1q, v4.1d, v12.1d // GHASH final block - low mov d8, v4.d[1] // GHASH final block - mid eor v8.8b, v8.8b, v4.8b // GHASH final block - mid pmull2 v20.1q, v4.2d, v12.2d // GHASH final block - high pmull v8.1q, v8.1d, v16.1d // GHASH final block - mid eor v9.16b, v9.16b, v20.16b // GHASH final block - high eor v11.16b, v11.16b, v21.16b // GHASH final block - low eor v10.16b, v10.16b, v8.16b // GHASH final block - mid movi v8.8b, #0xc2 eor v6.16b, v11.16b, v9.16b // MODULO - karatsuba tidy up shl d8, d8, #56 // mod_constant eor v10.16b, v10.16b, v6.16b // MODULO - karatsuba tidy up pmull v7.1q, v9.1d, v8.1d // MODULO - top 64b align with mid ext v9.16b, v9.16b, v9.16b, #8 // MODULO - other top alignment eor v10.16b, v10.16b, v7.16b // MODULO - fold into mid eor v10.16b, v10.16b, v9.16b // MODULO - fold into mid pmull v8.1q, v10.1d, v8.1d // MODULO - mid 64b align with low ext v10.16b, v10.16b, v10.16b, #8 // MODULO - other mid alignment eor v11.16b, v11.16b, v8.16b // MODULO - fold into low stp x6, x7, [x2] str w9, [x16, #12] // store the updated counter eor v11.16b, v11.16b, v10.16b // MODULO - fold into low ext v11.16b, v11.16b, v11.16b, #8 rev64 v11.16b, v11.16b mov x0, x15 st1 { v11.16b }, [x3] ldp x19, x20, [sp, #16] ldp x21, x22, [sp, #32] ldp x23, x24, [sp, #48] ldp d8, d9, [sp, #64] ldp d10, d11, [sp, #80] ldp d12, d13, [sp, #96] ldp d14, d15, [sp, #112] ldp x29, x30, [sp], #128 AARCH64_VALIDATE_LINK_REGISTER ret #endif #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
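The GHASH portions of the kernel above (the pmull/pmull2 accumulations and the "MODULO" folds driven by the shifted 0xc2 mod_constant) implement multiplication in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1, in the bit-reflected form that suits the PMULL instructions. As an informal reference for what those register sequences compute, the sketch below follows the plain bitwise algorithm from NIST SP 800-38D; the names be128, gf128_mul and ghash_block are placeholders invented for this sketch and do not exist in aws-lc, and the kernel itself appears to reach the same result much faster by multiplying four blocks against H^4..H^1 per iteration (the h1l|h1h .. h4l|h4h loads) and reducing only once.

/*
 * Minimal reference model (not aws-lc code) for the GF(2^128)
 * multiplication behind the GHASH "pmull ... / MODULO ..." sequences.
 * A 16-byte block is held big-endian: hi = first 8 bytes, lo = last 8.
 */
#include <stdint.h>

typedef struct { uint64_t hi, lo; } be128;   /* placeholder name */

be128 gf128_mul(be128 X, be128 Y) {
    be128 Z = {0, 0}, V = Y;
    for (int i = 0; i < 128; i++) {
        /* x_i is bit i of X, counting from the leftmost bit */
        uint64_t xi = (i < 64) ? (X.hi >> (63 - i)) & 1
                               : (X.lo >> (127 - i)) & 1;
        if (xi) { Z.hi ^= V.hi; Z.lo ^= V.lo; }
        /* shift V right by one; a bit falling off the end is reduced
         * by R = 0xE1 || 0^120, i.e. x^128 + x^7 + x^2 + x + 1 */
        uint64_t carry = V.lo & 1;
        V.lo = (V.lo >> 1) | (V.hi << 63);
        V.hi >>= 1;
        if (carry) V.hi ^= 0xe100000000000000ULL;
    }
    return Z;
}

/* GHASH update: tag = (tag ^ block) * H, matching the "feed in partial
 * tag" XOR followed by the pmull/eor accumulation in the tail code. */
be128 ghash_block(be128 tag, be128 block, be128 H) {
    tag.hi ^= block.hi;
    tag.lo ^= block.lo;
    return gf128_mul(tag, H);
}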
marvin-hansen/iggy-streaming-system
21,744
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/keccak1600-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .text .align 8 // strategic alignment and padding that allows to use // address value as loop termination condition... .quad 0,0,0,0,0,0,0,0 iotas: .quad 0x0000000000000001 .quad 0x0000000000008082 .quad 0x800000000000808a .quad 0x8000000080008000 .quad 0x000000000000808b .quad 0x0000000080000001 .quad 0x8000000080008081 .quad 0x8000000000008009 .quad 0x000000000000008a .quad 0x0000000000000088 .quad 0x0000000080008009 .quad 0x000000008000000a .quad 0x000000008000808b .quad 0x800000000000008b .quad 0x8000000000008089 .quad 0x8000000000008003 .quad 0x8000000000008002 .quad 0x8000000000000080 .quad 0x000000000000800a .quad 0x800000008000000a .quad 0x8000000080008081 .quad 0x8000000000008080 .quad 0x0000000080000001 .quad 0x8000000080008008 .align 5 KeccakF1600_int: AARCH64_SIGN_LINK_REGISTER adr x28,iotas stp x28,x30,[sp,#16] // 32 bytes on top are mine b Loop .align 4 Loop: ////////////////////////////////////////// Theta eor x26,x0,x5 stp x4,x9,[sp,#0] // offload pair... eor x27,x1,x6 eor x28,x2,x7 eor x30,x3,x8 eor x4,x4,x9 eor x26,x26,x10 eor x27,x27,x11 eor x28,x28,x12 eor x30,x30,x13 eor x4,x4,x14 eor x26,x26,x15 eor x27,x27,x16 eor x28,x28,x17 eor x30,x30,x25 eor x4,x4,x19 eor x26,x26,x20 eor x28,x28,x22 eor x27,x27,x21 eor x30,x30,x23 eor x4,x4,x24 eor x9,x26,x28,ror#63 eor x1,x1,x9 eor x6,x6,x9 eor x11,x11,x9 eor x16,x16,x9 eor x21,x21,x9 eor x9,x27,x30,ror#63 eor x28,x28,x4,ror#63 eor x30,x30,x26,ror#63 eor x4,x4,x27,ror#63 eor x27, x2,x9 // mov x27,x2 eor x7,x7,x9 eor x12,x12,x9 eor x17,x17,x9 eor x22,x22,x9 eor x0,x0,x4 eor x5,x5,x4 eor x10,x10,x4 eor x15,x15,x4 eor x20,x20,x4 ldp x4,x9,[sp,#0] // re-load offloaded data eor x26, x3,x28 // mov x26,x3 eor x8,x8,x28 eor x13,x13,x28 eor x25,x25,x28 eor x23,x23,x28 eor x28, x4,x30 // mov x28,x4 eor x9,x9,x30 eor x14,x14,x30 eor x19,x19,x30 eor x24,x24,x30 ////////////////////////////////////////// Rho+Pi mov x30,x1 ror x1,x6,#20 //mov x27,x2 ror x2,x12,#21 //mov x26,x3 ror x3,x25,#43 //mov x28,x4 ror x4,x24,#50 ror x6,x9,#44 ror x12,x13,#39 ror x25,x17,#49 ror x24,x21,#62 ror x9,x22,#3 ror x13,x19,#56 ror x17,x11,#54 ror x21,x8,#9 ror x22,x14,#25 ror x19,x23,#8 ror x11,x7,#58 ror x8,x16,#19 ror x14,x20,#46 ror x23,x15,#23 ror x7,x10,#61 ror x16,x5,#28 ror x5,x26,#36 ror x10,x30,#63 ror x15,x28,#37 ror x20,x27,#2 ////////////////////////////////////////// Chi+Iota bic x26,x2,x1 bic x27,x3,x2 bic x28,x0,x4 bic x30,x1,x0 eor x0,x0,x26 bic x26,x4,x3 eor x1,x1,x27 ldr x27,[sp,#16] eor x3,x3,x28 eor x4,x4,x30 eor x2,x2,x26 ldr x30,[x27],#8 // Iota[i++] bic x26,x7,x6 tst x27,#255 // are we done? 
str x27,[sp,#16] bic x27,x8,x7 bic x28,x5,x9 eor x0,x0,x30 // A[0][0] ^= Iota bic x30,x6,x5 eor x5,x5,x26 bic x26,x9,x8 eor x6,x6,x27 eor x8,x8,x28 eor x9,x9,x30 eor x7,x7,x26 bic x26,x12,x11 bic x27,x13,x12 bic x28,x10,x14 bic x30,x11,x10 eor x10,x10,x26 bic x26,x14,x13 eor x11,x11,x27 eor x13,x13,x28 eor x14,x14,x30 eor x12,x12,x26 bic x26,x17,x16 bic x27,x25,x17 bic x28,x15,x19 bic x30,x16,x15 eor x15,x15,x26 bic x26,x19,x25 eor x16,x16,x27 eor x25,x25,x28 eor x19,x19,x30 eor x17,x17,x26 bic x26,x22,x21 bic x27,x23,x22 bic x28,x20,x24 bic x30,x21,x20 eor x20,x20,x26 bic x26,x24,x23 eor x21,x21,x27 eor x23,x23,x28 eor x24,x24,x30 eor x22,x22,x26 bne Loop ldr x30,[sp,#24] AARCH64_VALIDATE_LINK_REGISTER ret .align 5 KeccakF1600: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#48 str x0,[sp,#32] // offload argument mov x26,x0 ldp x0,x1,[x0,#16*0] ldp x2,x3,[x26,#16*1] ldp x4,x5,[x26,#16*2] ldp x6,x7,[x26,#16*3] ldp x8,x9,[x26,#16*4] ldp x10,x11,[x26,#16*5] ldp x12,x13,[x26,#16*6] ldp x14,x15,[x26,#16*7] ldp x16,x17,[x26,#16*8] ldp x25,x19,[x26,#16*9] ldp x20,x21,[x26,#16*10] ldp x22,x23,[x26,#16*11] ldr x24,[x26,#16*12] bl KeccakF1600_int ldr x26,[sp,#32] stp x0,x1,[x26,#16*0] stp x2,x3,[x26,#16*1] stp x4,x5,[x26,#16*2] stp x6,x7,[x26,#16*3] stp x8,x9,[x26,#16*4] stp x10,x11,[x26,#16*5] stp x12,x13,[x26,#16*6] stp x14,x15,[x26,#16*7] stp x16,x17,[x26,#16*8] stp x25,x19,[x26,#16*9] stp x20,x21,[x26,#16*10] stp x22,x23,[x26,#16*11] str x24,[x26,#16*12] ldp x19,x20,[x29,#16] add sp,sp,#48 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .globl _SHA3_Absorb_hw .private_extern _SHA3_Absorb_hw .align 5 _SHA3_Absorb_hw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#64 stp x0,x1,[sp,#32] // offload arguments stp x2,x3,[sp,#48] mov x26,x0 // uint64_t A[5][5] mov x27,x1 // const void *inp mov x28,x2 // size_t len mov x30,x3 // size_t bsz ldp x0,x1,[x26,#16*0] ldp x2,x3,[x26,#16*1] ldp x4,x5,[x26,#16*2] ldp x6,x7,[x26,#16*3] ldp x8,x9,[x26,#16*4] ldp x10,x11,[x26,#16*5] ldp x12,x13,[x26,#16*6] ldp x14,x15,[x26,#16*7] ldp x16,x17,[x26,#16*8] ldp x25,x19,[x26,#16*9] ldp x20,x21,[x26,#16*10] ldp x22,x23,[x26,#16*11] ldr x24,[x26,#16*12] b Loop_absorb .align 4 Loop_absorb: subs x26,x28,x30 // len - bsz blo Labsorbed str x26,[sp,#48] // save len - bsz ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x0,x0,x26 cmp x30,#8*(0+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x1,x1,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x2,x2,x26 cmp x30,#8*(2+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x3,x3,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x4,x4,x26 cmp x30,#8*(4+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x5,x5,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x6,x6,x26 cmp x30,#8*(6+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x7,x7,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x8,x8,x26 cmp x30,#8*(8+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x9,x9,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x10,x10,x26 cmp x30,#8*(10+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x11,x11,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x12,x12,x26 cmp x30,#8*(12+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x13,x13,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x14,x14,x26 cmp x30,#8*(14+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x15,x15,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x16,x16,x26 cmp x30,#8*(16+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x17,x17,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x25,x25,x26 cmp x30,#8*(18+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x19,x19,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x20,x20,x26 cmp x30,#8*(20+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x21,x21,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x22,x22,x26 cmp x30,#8*(22+2) blo Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x23,x23,x26 beq Lprocess_block ldr x26,[x27],#8 // *inp++ #ifdef __AARCH64EB__ rev x26,x26 #endif eor x24,x24,x26 Lprocess_block: str x27,[sp,#40] // save inp bl KeccakF1600_int ldr x27,[sp,#40] // restore arguments ldp 
x28,x30,[sp,#48] b Loop_absorb .align 4 Labsorbed: ldr x27,[sp,#32] stp x0,x1,[x27,#16*0] stp x2,x3,[x27,#16*1] stp x4,x5,[x27,#16*2] stp x6,x7,[x27,#16*3] stp x8,x9,[x27,#16*4] stp x10,x11,[x27,#16*5] stp x12,x13,[x27,#16*6] stp x14,x15,[x27,#16*7] stp x16,x17,[x27,#16*8] stp x25,x19,[x27,#16*9] stp x20,x21,[x27,#16*10] stp x22,x23,[x27,#16*11] str x24,[x27,#16*12] mov x0,x28 // return value ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .globl _SHA3_Squeeze_hw .private_extern _SHA3_Squeeze_hw .align 5 _SHA3_Squeeze_hw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-48]! add x29,sp,#0 cmp x2,#0 beq Lsqueeze_abort stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] mov x19,x0 // put aside arguments mov x20,x1 mov x21,x2 mov x22,x3 cmp x4, #0 // x4 = 'padded' argument; if !=0, perform Keccak first bne Lnext_block Loop_squeeze: ldr x4,[x0],#8 cmp x21,#8 blo Lsqueeze_tail #ifdef __AARCH64EB__ rev x4,x4 #endif str x4,[x20],#8 subs x21,x21,#8 beq Lsqueeze_done subs x3,x3,#8 bhi Loop_squeeze Lnext_block: mov x0,x19 bl KeccakF1600 mov x0,x19 mov x3,x22 b Loop_squeeze .align 4 Lsqueeze_tail: strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 lsr x4,x4,#8 subs x21,x21,#1 beq Lsqueeze_done strb w4,[x20],#1 Lsqueeze_done: ldp x19,x20,[sp,#16] ldp x21,x22,[sp,#32] Lsqueeze_abort: ldp x29,x30,[sp],#48 AARCH64_VALIDATE_LINK_REGISTER ret .align 5 KeccakF1600_ce: mov x9,#24 adr x10,iotas b Loop_ce .align 4 Loop_ce: ////////////////////////////////////////////////// Theta .long 0xce0f2a99 //eor3 v25.16b,v20.16b,v15.16b,v10.16b .long 0xce102eba //eor3 v26.16b,v21.16b,v16.16b,v11.16b .long 0xce1132db //eor3 v27.16b,v22.16b,v17.16b,v12.16b .long 0xce1236fc //eor3 v28.16b,v23.16b,v18.16b,v13.16b .long 0xce133b1d //eor3 v29.16b,v24.16b,v19.16b,v14.16b .long 0xce050339 //eor3 v25.16b,v25.16b, v5.16b,v0.16b .long 0xce06075a //eor3 v26.16b,v26.16b, v6.16b,v1.16b .long 0xce070b7b //eor3 v27.16b,v27.16b, v7.16b,v2.16b .long 0xce080f9c //eor3 v28.16b,v28.16b, v8.16b,v3.16b .long 0xce0913bd //eor3 v29.16b,v29.16b, v9.16b,v4.16b .long 0xce7b8f3e //rax1 v30.16b,v25.16b,v27.16b // D[1] .long 0xce7c8f5f //rax1 v31.16b,v26.16b,v28.16b // D[2] .long 0xce7d8f7b //rax1 v27.16b,v27.16b,v29.16b // D[3] .long 0xce798f9c //rax1 v28.16b,v28.16b,v25.16b // D[4] .long 0xce7a8fbd //rax1 v29.16b,v29.16b,v26.16b // D[0] ////////////////////////////////////////////////// Theta+Rho+Pi .long 0xce9efc39 //xar v25.16b, v1.16b,v30.16b,#63 // C[0]=A[2][0] .long 0xce9e50c1 //xar v1.16b,v6.16b,v30.16b,#20 .long 0xce9cb126 //xar v6.16b,v9.16b,v28.16b,#44 .long 0xce9f0ec9 //xar v9.16b,v22.16b,v31.16b,#3 .long 0xce9c65d6 //xar v22.16b,v14.16b,v28.16b,#25 .long 0xce9dba8e //xar v14.16b,v20.16b,v29.16b,#46 .long 0xce9f085a //xar v26.16b, v2.16b,v31.16b,#2 // C[1]=A[4][0] .long 0xce9f5582 //xar v2.16b,v12.16b,v31.16b,#21 .long 0xce9b9dac //xar v12.16b,v13.16b,v27.16b,#39 .long 0xce9ce26d //xar v13.16b,v19.16b,v28.16b,#56 .long 0xce9b22f3 //xar v19.16b,v23.16b,v27.16b,#8 .long 0xce9d5df7 //xar v23.16b,v15.16b,v29.16b,#23 .long 0xce9c948f //xar v15.16b,v4.16b,v28.16b,#37 .long 0xce9ccb1c //xar v28.16b, v24.16b,v28.16b,#50 // D[4]=A[0][4] .long 
0xce9efab8 //xar v24.16b,v21.16b,v30.16b,#62 .long 0xce9b2508 //xar v8.16b,v8.16b,v27.16b,#9 // A[1][3]=A[4][1] .long 0xce9e4e04 //xar v4.16b,v16.16b,v30.16b,#19 // A[0][4]=A[1][3] .long 0xce9d70b0 //xar v16.16b,v5.16b,v29.16b,#28 .long 0xce9b9065 //xar v5.16b,v3.16b,v27.16b,#36 eor v0.16b,v0.16b,v29.16b .long 0xce9bae5b //xar v27.16b, v18.16b,v27.16b,#43 // D[3]=A[0][3] .long 0xce9fc623 //xar v3.16b,v17.16b,v31.16b,#49 // A[0][3]=A[3][3] .long 0xce9ed97e //xar v30.16b, v11.16b,v30.16b,#54 // D[1]=A[3][2] .long 0xce9fe8ff //xar v31.16b, v7.16b,v31.16b,#58 // D[2]=A[2][1] .long 0xce9df55d //xar v29.16b, v10.16b,v29.16b,#61 // D[0]=A[1][2] ////////////////////////////////////////////////// Chi+Iota .long 0xce362354 //bcax v20.16b,v26.16b, v22.16b,v8.16b // A[1][3]=A[4][1] .long 0xce375915 //bcax v21.16b,v8.16b,v23.16b,v22.16b // A[1][3]=A[4][1] .long 0xce385ed6 //bcax v22.16b,v22.16b,v24.16b,v23.16b .long 0xce3a62f7 //bcax v23.16b,v23.16b,v26.16b, v24.16b .long 0xce286b18 //bcax v24.16b,v24.16b,v8.16b,v26.16b // A[1][3]=A[4][1] ld1r {v26.2d},[x10],#8 .long 0xce330fd1 //bcax v17.16b,v30.16b, v19.16b,v3.16b // A[0][3]=A[3][3] .long 0xce2f4c72 //bcax v18.16b,v3.16b,v15.16b,v19.16b // A[0][3]=A[3][3] .long 0xce303e73 //bcax v19.16b,v19.16b,v16.16b,v15.16b .long 0xce3e41ef //bcax v15.16b,v15.16b,v30.16b, v16.16b .long 0xce237a10 //bcax v16.16b,v16.16b,v3.16b,v30.16b // A[0][3]=A[3][3] .long 0xce2c7f2a //bcax v10.16b,v25.16b, v12.16b,v31.16b .long 0xce2d33eb //bcax v11.16b,v31.16b, v13.16b,v12.16b .long 0xce2e358c //bcax v12.16b,v12.16b,v14.16b,v13.16b .long 0xce3939ad //bcax v13.16b,v13.16b,v25.16b, v14.16b .long 0xce3f65ce //bcax v14.16b,v14.16b,v31.16b, v25.16b .long 0xce2913a7 //bcax v7.16b,v29.16b, v9.16b,v4.16b // A[0][4]=A[1][3] .long 0xce252488 //bcax v8.16b,v4.16b,v5.16b,v9.16b // A[0][4]=A[1][3] .long 0xce261529 //bcax v9.16b,v9.16b,v6.16b,v5.16b .long 0xce3d18a5 //bcax v5.16b,v5.16b,v29.16b, v6.16b .long 0xce2474c6 //bcax v6.16b,v6.16b,v4.16b,v29.16b // A[0][4]=A[1][3] .long 0xce207363 //bcax v3.16b,v27.16b, v0.16b,v28.16b .long 0xce210384 //bcax v4.16b,v28.16b, v1.16b,v0.16b .long 0xce220400 //bcax v0.16b,v0.16b,v2.16b,v1.16b .long 0xce3b0821 //bcax v1.16b,v1.16b,v27.16b, v2.16b .long 0xce3c6c42 //bcax v2.16b,v2.16b,v28.16b, v27.16b eor v0.16b,v0.16b,v26.16b subs x9,x9,#1 bne Loop_ce ret .align 5 KeccakF1600_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! add x29,sp,#0 stp d8,d9,[sp,#16] // per ABI requirement stp d10,d11,[sp,#32] stp d12,d13,[sp,#48] stp d14,d15,[sp,#64] ldp d0,d1,[x0,#8*0] ldp d2,d3,[x0,#8*2] ldp d4,d5,[x0,#8*4] ldp d6,d7,[x0,#8*6] ldp d8,d9,[x0,#8*8] ldp d10,d11,[x0,#8*10] ldp d12,d13,[x0,#8*12] ldp d14,d15,[x0,#8*14] ldp d16,d17,[x0,#8*16] ldp d18,d19,[x0,#8*18] ldp d20,d21,[x0,#8*20] ldp d22,d23,[x0,#8*22] ldr d24,[x0,#8*24] bl KeccakF1600_ce ldr x30,[sp,#8] stp d0,d1,[x0,#8*0] stp d2,d3,[x0,#8*2] stp d4,d5,[x0,#8*4] stp d6,d7,[x0,#8*6] stp d8,d9,[x0,#8*8] stp d10,d11,[x0,#8*10] stp d12,d13,[x0,#8*12] stp d14,d15,[x0,#8*14] stp d16,d17,[x0,#8*16] stp d18,d19,[x0,#8*18] stp d20,d21,[x0,#8*20] stp d22,d23,[x0,#8*22] str d24,[x0,#8*24] ldp d8,d9,[sp,#16] ldp d10,d11,[sp,#32] ldp d12,d13,[sp,#48] ldp d14,d15,[sp,#64] ldr x29,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret .globl _SHA3_Absorb_cext .private_extern _SHA3_Absorb_cext .align 5 _SHA3_Absorb_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-80]! 
add x29,sp,#0 stp d8,d9,[sp,#16] // per ABI requirement stp d10,d11,[sp,#32] stp d12,d13,[sp,#48] stp d14,d15,[sp,#64] ldp d0,d1,[x0,#8*0] ldp d2,d3,[x0,#8*2] ldp d4,d5,[x0,#8*4] ldp d6,d7,[x0,#8*6] ldp d8,d9,[x0,#8*8] ldp d10,d11,[x0,#8*10] ldp d12,d13,[x0,#8*12] ldp d14,d15,[x0,#8*14] ldp d16,d17,[x0,#8*16] ldp d18,d19,[x0,#8*18] ldp d20,d21,[x0,#8*20] ldp d22,d23,[x0,#8*22] ldr d24,[x0,#8*24] b Loop_absorb_ce .align 4 Loop_absorb_ce: subs x2,x2,x3 // len - bsz blo Labsorbed_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v0.16b,v0.16b,v31.16b cmp x3,#8*(0+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v1.16b,v1.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v2.16b,v2.16b,v31.16b cmp x3,#8*(2+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v3.16b,v3.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v4.16b,v4.16b,v31.16b cmp x3,#8*(4+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v5.16b,v5.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v6.16b,v6.16b,v31.16b cmp x3,#8*(6+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v7.16b,v7.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v8.16b,v8.16b,v31.16b cmp x3,#8*(8+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v9.16b,v9.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v10.16b,v10.16b,v31.16b cmp x3,#8*(10+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v11.16b,v11.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v12.16b,v12.16b,v31.16b cmp x3,#8*(12+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v13.16b,v13.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v14.16b,v14.16b,v31.16b cmp x3,#8*(14+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v15.16b,v15.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v16.16b,v16.16b,v31.16b cmp x3,#8*(16+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v17.16b,v17.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v18.16b,v18.16b,v31.16b cmp x3,#8*(18+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v19.16b,v19.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v20.16b,v20.16b,v31.16b cmp x3,#8*(20+2) blo Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v21.16b,v21.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v22.16b,v22.16b,v31.16b cmp x3,#8*(22+2) blo 
Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v23.16b,v23.16b,v31.16b beq Lprocess_block_ce ldr d31,[x1],#8 // *inp++ #ifdef __AARCH64EB__ rev64 v31.16b,v31.16b #endif eor v24.16b,v24.16b,v31.16b Lprocess_block_ce: bl KeccakF1600_ce b Loop_absorb_ce .align 4 Labsorbed_ce: stp d0,d1,[x0,#8*0] stp d2,d3,[x0,#8*2] stp d4,d5,[x0,#8*4] stp d6,d7,[x0,#8*6] stp d8,d9,[x0,#8*8] stp d10,d11,[x0,#8*10] stp d12,d13,[x0,#8*12] stp d14,d15,[x0,#8*14] stp d16,d17,[x0,#8*16] stp d18,d19,[x0,#8*18] stp d20,d21,[x0,#8*20] stp d22,d23,[x0,#8*22] str d24,[x0,#8*24] add x0,x2,x3 // return value ldp d8,d9,[sp,#16] ldp d10,d11,[sp,#32] ldp d12,d13,[sp,#48] ldp d14,d15,[sp,#64] ldp x29,x30,[sp],#80 AARCH64_VALIDATE_LINK_REGISTER ret .globl _SHA3_Squeeze_cext .private_extern _SHA3_Squeeze_cext .align 5 _SHA3_Squeeze_cext: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-16]! add x29,sp,#0 cmp x2,#0 beq Lsqueeze_done_ce mov x9,x0 mov x10,x3 Loop_squeeze_ce: ldr x4,[x9],#8 cmp x2,#8 blo Lsqueeze_tail_ce #ifdef __AARCH64EB__ rev x4,x4 #endif str x4,[x1],#8 beq Lsqueeze_done_ce sub x2,x2,#8 subs x10,x10,#8 bhi Loop_squeeze_ce bl KeccakF1600_cext ldr x30,[sp,#8] mov x9,x0 mov x10,x3 b Loop_squeeze_ce .align 4 Lsqueeze_tail_ce: strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 lsr x4,x4,#8 subs x2,x2,#1 beq Lsqueeze_done_ce strb w4,[x1],#1 Lsqueeze_done_ce: ldr x29,[sp],#16 AARCH64_VALIDATE_LINK_REGISTER ret .byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
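The scalar absorb loop earlier in this file and _SHA3_Absorb_cext just above share one sponge pattern: while at least one full block of bsz bytes remains, XOR the block into the first bsz/8 state lanes (byte-swapping with rev/rev64 on big-endian targets), run the Keccak-f[1600] permutation, and finally report how many input bytes were left unprocessed. A minimal C sketch of that control flow, assuming a little-endian host and a rate that is a multiple of 8 bytes; sha3_absorb_sketch and keccak_f1600 are illustrative names, not the aws-lc API:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Placeholder for the Keccak-f[1600] permutation implemented by the
 * assembly above (KeccakF1600 / KeccakF1600_ce). */
void keccak_f1600(uint64_t A[25]);

/* Sponge absorb loop: XOR full blocks of bsz bytes into the state,
 * permute, and return the number of unprocessed tail bytes. */
size_t sha3_absorb_sketch(uint64_t A[25], const uint8_t *inp, size_t len,
                          size_t bsz) {
  while (len >= bsz) {                /* mirrors "subs len-bsz; blo Labsorbed" */
    for (size_t i = 0; i < bsz / 8; i++) {
      uint64_t lane;
      memcpy(&lane, inp + 8 * i, 8);  /* lanes are little-endian; the asm
                                         byte-swaps under __AARCH64EB__ */
      A[i] ^= lane;
    }
    keccak_f1600(A);
    inp += bsz;
    len -= bsz;
  }
  return len;                         /* leftover byte count, as returned
                                         in x0 by the routines above */
}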
marvin-hansen/iggy-streaming-system
1,991
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/bn-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

#include <openssl/asm_base.h>

#if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__)
#include <openssl/arm_arch.h>

.text

// BN_ULONG bn_add_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
//                       size_t num);
.globl _bn_add_words
.private_extern _bn_add_words
.align 4
_bn_add_words:
	AARCH64_VALID_CALL_TARGET
	# Clear the carry flag.
	cmn xzr, xzr

	# aarch64 can load two registers at a time, so we do two loop iterations
	# at a time. Split x3 = 2 * x8 + x3. This allows loop
	# operations to use CBNZ without clobbering the carry flag.
	lsr x8, x3, #1
	and x3, x3, #1

	cbz x8, Ladd_tail
Ladd_loop:
	ldp x4, x5, [x1], #16
	ldp x6, x7, [x2], #16
	sub x8, x8, #1
	adcs x4, x4, x6
	adcs x5, x5, x7
	stp x4, x5, [x0], #16
	cbnz x8, Ladd_loop

Ladd_tail:
	cbz x3, Ladd_exit
	ldr x4, [x1], #8
	ldr x6, [x2], #8
	adcs x4, x4, x6
	str x4, [x0], #8

Ladd_exit:
	cset x0, cs
	ret

// BN_ULONG bn_sub_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
//                       size_t num);
.globl _bn_sub_words
.private_extern _bn_sub_words
.align 4
_bn_sub_words:
	AARCH64_VALID_CALL_TARGET
	# Set the carry flag. Arm's borrow bit is flipped from the carry flag,
	# so we want C = 1 here.
	cmp xzr, xzr

	# aarch64 can load two registers at a time, so we do two loop iterations
	# at a time. Split x3 = 2 * x8 + x3. This allows loop
	# operations to use CBNZ without clobbering the carry flag.
	lsr x8, x3, #1
	and x3, x3, #1

	cbz x8, Lsub_tail
Lsub_loop:
	ldp x4, x5, [x1], #16
	ldp x6, x7, [x2], #16
	sub x8, x8, #1
	sbcs x4, x4, x6
	sbcs x5, x5, x7
	stp x4, x5, [x0], #16
	cbnz x8, Lsub_loop

Lsub_tail:
	cbz x3, Lsub_exit
	ldr x4, [x1], #8
	ldr x6, [x2], #8
	sbcs x4, x4, x6
	str x4, [x0], #8

Lsub_exit:
	cset x0, cc
	ret

#endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
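The comments in bn-armv8.S give the C prototypes: both routines walk the limb arrays once, propagate the carry (for add) or borrow (for sub) through adcs/sbcs, and return the final flag via cset. A portable C sketch of that contract, assuming BN_ULONG is a 64-bit limb; the _ref names are illustrative, not part of aws-lc:

#include <stddef.h>
#include <stdint.h>

typedef uint64_t BN_ULONG;  /* assumption: 64-bit limbs, as on aarch64 */

/* rp[i] = ap[i] + bp[i] + carry; returns the carry out of the top limb
 * (the asm returns it with "cset x0, cs"). */
BN_ULONG bn_add_words_ref(BN_ULONG *rp, const BN_ULONG *ap,
                          const BN_ULONG *bp, size_t num) {
  BN_ULONG carry = 0;
  for (size_t i = 0; i < num; i++) {
    BN_ULONG t = ap[i] + carry;
    carry = (t < carry);        /* overflow of ap[i] + carry */
    rp[i] = t + bp[i];
    carry += (rp[i] < t);       /* overflow of the second addition */
  }
  return carry;
}

/* rp[i] = ap[i] - bp[i] - borrow; returns the final borrow
 * (the asm returns it with "cset x0, cc", Arm's inverted carry). */
BN_ULONG bn_sub_words_ref(BN_ULONG *rp, const BN_ULONG *ap,
                          const BN_ULONG *bp, size_t num) {
  BN_ULONG borrow = 0;
  for (size_t i = 0; i < num; i++) {
    BN_ULONG t = ap[i] - bp[i];
    BN_ULONG b = (ap[i] < bp[i]);
    rp[i] = t - borrow;
    borrow = b | (t < borrow);
  }
  return borrow;
}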
marvin-hansen/iggy-streaming-system
30,952
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/fipsmodule/armv8-mont.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .text .globl _bn_mul_mont .private_extern _bn_mul_mont .align 5 _bn_mul_mont: AARCH64_SIGN_LINK_REGISTER tst x5,#7 b.eq __bn_sqr8x_mont tst x5,#3 b.eq __bn_mul4x_mont Lmul_mont: stp x29,x30,[sp,#-64]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] ldr x9,[x2],#8 // bp[0] sub x22,sp,x5,lsl#3 ldp x7,x8,[x1],#16 // ap[0..1] lsl x5,x5,#3 ldr x4,[x4] // *n0 and x22,x22,#-16 // ABI says so ldp x13,x14,[x3],#16 // np[0..1] mul x6,x7,x9 // ap[0]*bp[0] sub x21,x5,#16 // j=num-2 umulh x7,x7,x9 mul x10,x8,x9 // ap[1]*bp[0] umulh x11,x8,x9 mul x15,x6,x4 // "tp[0]"*n0 mov sp,x22 // alloca // (*) mul x12,x13,x15 // np[0]*m1 umulh x13,x13,x15 mul x16,x14,x15 // np[1]*m1 // (*) adds x12,x12,x6 // discarded // (*) As for removal of first multiplication and addition // instructions. The outcome of first addition is // guaranteed to be zero, which leaves two computationally // significant outcomes: it either carries or not. Then // question is when does it carry? Is there alternative // way to deduce it? If you follow operations, you can // observe that condition for carry is quite simple: // x6 being non-zero. So that carry can be calculated // by adding -1 to x6. That's what next instruction does. subs xzr,x6,#1 // (*) umulh x17,x14,x15 adc x13,x13,xzr cbz x21,L1st_skip L1st: ldr x8,[x1],#8 adds x6,x10,x7 sub x21,x21,#8 // j-- adc x7,x11,xzr ldr x14,[x3],#8 adds x12,x16,x13 mul x10,x8,x9 // ap[j]*bp[0] adc x13,x17,xzr umulh x11,x8,x9 adds x12,x12,x6 mul x16,x14,x15 // np[j]*m1 adc x13,x13,xzr umulh x17,x14,x15 str x12,[x22],#8 // tp[j-1] cbnz x21,L1st L1st_skip: adds x6,x10,x7 sub x1,x1,x5 // rewind x1 adc x7,x11,xzr adds x12,x16,x13 sub x3,x3,x5 // rewind x3 adc x13,x17,xzr adds x12,x12,x6 sub x20,x5,#8 // i=num-1 adcs x13,x13,x7 adc x19,xzr,xzr // upmost overflow bit stp x12,x13,[x22] Louter: ldr x9,[x2],#8 // bp[i] ldp x7,x8,[x1],#16 ldr x23,[sp] // tp[0] add x22,sp,#8 mul x6,x7,x9 // ap[0]*bp[i] sub x21,x5,#16 // j=num-2 umulh x7,x7,x9 ldp x13,x14,[x3],#16 mul x10,x8,x9 // ap[1]*bp[i] adds x6,x6,x23 umulh x11,x8,x9 adc x7,x7,xzr mul x15,x6,x4 sub x20,x20,#8 // i-- // (*) mul x12,x13,x15 // np[0]*m1 umulh x13,x13,x15 mul x16,x14,x15 // np[1]*m1 // (*) adds x12,x12,x6 subs xzr,x6,#1 // (*) umulh x17,x14,x15 cbz x21,Linner_skip Linner: ldr x8,[x1],#8 adc x13,x13,xzr ldr x23,[x22],#8 // tp[j] adds x6,x10,x7 sub x21,x21,#8 // j-- adc x7,x11,xzr adds x12,x16,x13 ldr x14,[x3],#8 adc x13,x17,xzr mul x10,x8,x9 // ap[j]*bp[i] adds x6,x6,x23 umulh x11,x8,x9 adc x7,x7,xzr mul x16,x14,x15 // np[j]*m1 adds x12,x12,x6 umulh x17,x14,x15 str x12,[x22,#-16] // tp[j-1] cbnz x21,Linner Linner_skip: ldr x23,[x22],#8 // tp[j] adc x13,x13,xzr adds x6,x10,x7 sub x1,x1,x5 // rewind x1 adc x7,x11,xzr adds x12,x16,x13 sub x3,x3,x5 // rewind x3 adcs x13,x17,x19 adc x19,xzr,xzr adds x6,x6,x23 adc x7,x7,xzr adds x12,x12,x6 adcs x13,x13,x7 adc x19,x19,xzr // upmost overflow bit stp x12,x13,[x22,#-16] cbnz x20,Louter // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. 
ldr x23,[sp] // tp[0] add x22,sp,#8 ldr x14,[x3],#8 // np[0] subs x21,x5,#8 // j=num-1 and clear borrow mov x1,x0 Lsub: sbcs x8,x23,x14 // tp[j]-np[j] ldr x23,[x22],#8 sub x21,x21,#8 // j-- ldr x14,[x3],#8 str x8,[x1],#8 // rp[j]=tp[j]-np[j] cbnz x21,Lsub sbcs x8,x23,x14 sbcs x19,x19,xzr // did it borrow? str x8,[x1],#8 // rp[num-1] ldr x23,[sp] // tp[0] add x22,sp,#8 ldr x8,[x0],#8 // rp[0] sub x5,x5,#8 // num-- nop Lcond_copy: sub x5,x5,#8 // num-- csel x14,x23,x8,lo // did it borrow? ldr x23,[x22],#8 ldr x8,[x0],#8 str xzr,[x22,#-16] // wipe tp str x14,[x0,#-16] cbnz x5,Lcond_copy csel x14,x23,x8,lo str xzr,[x22,#-8] // wipe tp str x14,[x0,#-8] ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldr x29,[sp],#64 AARCH64_VALIDATE_LINK_REGISTER ret .align 5 __bn_sqr8x_mont: // Not adding AARCH64_SIGN_LINK_REGISTER here because __bn_sqr8x_mont is jumped to // only from bn_mul_mont which has already signed the return address. cmp x1,x2 b.ne __bn_mul4x_mont Lsqr8x_mont: stp x29,x30,[sp,#-128]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] stp x0,x3,[sp,#96] // offload rp and np ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] ldp x10,x11,[x1,#8*4] ldp x12,x13,[x1,#8*6] sub x2,sp,x5,lsl#4 lsl x5,x5,#3 ldr x4,[x4] // *n0 mov sp,x2 // alloca sub x27,x5,#8*8 b Lsqr8x_zero_start Lsqr8x_zero: sub x27,x27,#8*8 stp xzr,xzr,[x2,#8*0] stp xzr,xzr,[x2,#8*2] stp xzr,xzr,[x2,#8*4] stp xzr,xzr,[x2,#8*6] Lsqr8x_zero_start: stp xzr,xzr,[x2,#8*8] stp xzr,xzr,[x2,#8*10] stp xzr,xzr,[x2,#8*12] stp xzr,xzr,[x2,#8*14] add x2,x2,#8*16 cbnz x27,Lsqr8x_zero add x3,x1,x5 add x1,x1,#8*8 mov x19,xzr mov x20,xzr mov x21,xzr mov x22,xzr mov x23,xzr mov x24,xzr mov x25,xzr mov x26,xzr mov x2,sp str x4,[x29,#112] // offload n0 // Multiply everything but a[i]*a[i] .align 4 Lsqr8x_outer_loop: // a[1]a[0] (i) // a[2]a[0] // a[3]a[0] // a[4]a[0] // a[5]a[0] // a[6]a[0] // a[7]a[0] // a[2]a[1] (ii) // a[3]a[1] // a[4]a[1] // a[5]a[1] // a[6]a[1] // a[7]a[1] // a[3]a[2] (iii) // a[4]a[2] // a[5]a[2] // a[6]a[2] // a[7]a[2] // a[4]a[3] (iv) // a[5]a[3] // a[6]a[3] // a[7]a[3] // a[5]a[4] (v) // a[6]a[4] // a[7]a[4] // a[6]a[5] (vi) // a[7]a[5] // a[7]a[6] (vii) mul x14,x7,x6 // lo(a[1..7]*a[0]) (i) mul x15,x8,x6 mul x16,x9,x6 mul x17,x10,x6 adds x20,x20,x14 // t[1]+lo(a[1]*a[0]) mul x14,x11,x6 adcs x21,x21,x15 mul x15,x12,x6 adcs x22,x22,x16 mul x16,x13,x6 adcs x23,x23,x17 umulh x17,x7,x6 // hi(a[1..7]*a[0]) adcs x24,x24,x14 umulh x14,x8,x6 adcs x25,x25,x15 umulh x15,x9,x6 adcs x26,x26,x16 umulh x16,x10,x6 stp x19,x20,[x2],#8*2 // t[0..1] adc x19,xzr,xzr // t[8] adds x21,x21,x17 // t[2]+lo(a[1]*a[0]) umulh x17,x11,x6 adcs x22,x22,x14 umulh x14,x12,x6 adcs x23,x23,x15 umulh x15,x13,x6 adcs x24,x24,x16 mul x16,x8,x7 // lo(a[2..7]*a[1]) (ii) adcs x25,x25,x17 mul x17,x9,x7 adcs x26,x26,x14 mul x14,x10,x7 adc x19,x19,x15 mul x15,x11,x7 adds x22,x22,x16 mul x16,x12,x7 adcs x23,x23,x17 mul x17,x13,x7 adcs x24,x24,x14 umulh x14,x8,x7 // hi(a[2..7]*a[1]) adcs x25,x25,x15 umulh x15,x9,x7 adcs x26,x26,x16 umulh x16,x10,x7 adcs x19,x19,x17 umulh x17,x11,x7 stp x21,x22,[x2],#8*2 // t[2..3] adc x20,xzr,xzr // t[9] adds x23,x23,x14 umulh x14,x12,x7 adcs x24,x24,x15 umulh x15,x13,x7 adcs x25,x25,x16 mul x16,x9,x8 // lo(a[3..7]*a[2]) (iii) adcs x26,x26,x17 mul x17,x10,x8 adcs x19,x19,x14 mul x14,x11,x8 adc x20,x20,x15 mul x15,x12,x8 adds x24,x24,x16 mul x16,x13,x8 adcs x25,x25,x17 umulh x17,x9,x8 // hi(a[3..7]*a[2]) adcs x26,x26,x14 umulh 
x14,x10,x8 adcs x19,x19,x15 umulh x15,x11,x8 adcs x20,x20,x16 umulh x16,x12,x8 stp x23,x24,[x2],#8*2 // t[4..5] adc x21,xzr,xzr // t[10] adds x25,x25,x17 umulh x17,x13,x8 adcs x26,x26,x14 mul x14,x10,x9 // lo(a[4..7]*a[3]) (iv) adcs x19,x19,x15 mul x15,x11,x9 adcs x20,x20,x16 mul x16,x12,x9 adc x21,x21,x17 mul x17,x13,x9 adds x26,x26,x14 umulh x14,x10,x9 // hi(a[4..7]*a[3]) adcs x19,x19,x15 umulh x15,x11,x9 adcs x20,x20,x16 umulh x16,x12,x9 adcs x21,x21,x17 umulh x17,x13,x9 stp x25,x26,[x2],#8*2 // t[6..7] adc x22,xzr,xzr // t[11] adds x19,x19,x14 mul x14,x11,x10 // lo(a[5..7]*a[4]) (v) adcs x20,x20,x15 mul x15,x12,x10 adcs x21,x21,x16 mul x16,x13,x10 adc x22,x22,x17 umulh x17,x11,x10 // hi(a[5..7]*a[4]) adds x20,x20,x14 umulh x14,x12,x10 adcs x21,x21,x15 umulh x15,x13,x10 adcs x22,x22,x16 mul x16,x12,x11 // lo(a[6..7]*a[5]) (vi) adc x23,xzr,xzr // t[12] adds x21,x21,x17 mul x17,x13,x11 adcs x22,x22,x14 umulh x14,x12,x11 // hi(a[6..7]*a[5]) adc x23,x23,x15 umulh x15,x13,x11 adds x22,x22,x16 mul x16,x13,x12 // lo(a[7]*a[6]) (vii) adcs x23,x23,x17 umulh x17,x13,x12 // hi(a[7]*a[6]) adc x24,xzr,xzr // t[13] adds x23,x23,x14 sub x27,x3,x1 // done yet? adc x24,x24,x15 adds x24,x24,x16 sub x14,x3,x5 // rewinded ap adc x25,xzr,xzr // t[14] add x25,x25,x17 cbz x27,Lsqr8x_outer_break mov x4,x6 ldp x6,x7,[x2,#8*0] ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] adds x19,x19,x6 adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x0,x1 adcs x26,xzr,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved below mov x27,#-8*8 // a[8]a[0] // a[9]a[0] // a[a]a[0] // a[b]a[0] // a[c]a[0] // a[d]a[0] // a[e]a[0] // a[f]a[0] // a[8]a[1] // a[f]a[1]........................ // a[8]a[2] // a[f]a[2]........................ // a[8]a[3] // a[f]a[3]........................ // a[8]a[4] // a[f]a[4]........................ // a[8]a[5] // a[f]a[5]........................ // a[8]a[6] // a[f]a[6]........................ // a[8]a[7] // a[f]a[7]........................ Lsqr8x_mul: mul x14,x6,x4 adc x28,xzr,xzr // carry bit, modulo-scheduled mul x15,x7,x4 add x27,x27,#8 mul x16,x8,x4 mul x17,x9,x4 adds x19,x19,x14 mul x14,x10,x4 adcs x20,x20,x15 mul x15,x11,x4 adcs x21,x21,x16 mul x16,x12,x4 adcs x22,x22,x17 mul x17,x13,x4 adcs x23,x23,x14 umulh x14,x6,x4 adcs x24,x24,x15 umulh x15,x7,x4 adcs x25,x25,x16 umulh x16,x8,x4 adcs x26,x26,x17 umulh x17,x9,x4 adc x28,x28,xzr str x19,[x2],#8 adds x19,x20,x14 umulh x14,x10,x4 adcs x20,x21,x15 umulh x15,x11,x4 adcs x21,x22,x16 umulh x16,x12,x4 adcs x22,x23,x17 umulh x17,x13,x4 ldr x4,[x0,x27] adcs x23,x24,x14 adcs x24,x25,x15 adcs x25,x26,x16 adcs x26,x28,x17 //adc x28,xzr,xzr // moved above cbnz x27,Lsqr8x_mul // note that carry flag is guaranteed // to be zero at this point cmp x1,x3 // done yet? b.eq Lsqr8x_break ldp x6,x7,[x2,#8*0] ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] adds x19,x19,x6 ldr x4,[x0,#-8*8] adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x27,#-8*8 adcs x26,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved above b Lsqr8x_mul .align 4 Lsqr8x_break: ldp x6,x7,[x0,#8*0] add x1,x0,#8*8 ldp x8,x9,[x0,#8*2] sub x14,x3,x1 // is it last iteration? 
ldp x10,x11,[x0,#8*4] sub x15,x2,x14 ldp x12,x13,[x0,#8*6] cbz x14,Lsqr8x_outer_loop stp x19,x20,[x2,#8*0] ldp x19,x20,[x15,#8*0] stp x21,x22,[x2,#8*2] ldp x21,x22,[x15,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[x15,#8*4] stp x25,x26,[x2,#8*6] mov x2,x15 ldp x25,x26,[x15,#8*6] b Lsqr8x_outer_loop .align 4 Lsqr8x_outer_break: // Now multiply above result by 2 and add a[n-1]*a[n-1]|...|a[0]*a[0] ldp x7,x9,[x14,#8*0] // recall that x14 is &a[0] ldp x15,x16,[sp,#8*1] ldp x11,x13,[x14,#8*2] add x1,x14,#8*4 ldp x17,x14,[sp,#8*3] stp x19,x20,[x2,#8*0] mul x19,x7,x7 stp x21,x22,[x2,#8*2] umulh x7,x7,x7 stp x23,x24,[x2,#8*4] mul x8,x9,x9 stp x25,x26,[x2,#8*6] mov x2,sp umulh x9,x9,x9 adds x20,x7,x15,lsl#1 extr x15,x16,x15,#63 sub x27,x5,#8*4 Lsqr4x_shift_n_add: adcs x21,x8,x15 extr x16,x17,x16,#63 sub x27,x27,#8*4 adcs x22,x9,x16 ldp x15,x16,[x2,#8*5] mul x10,x11,x11 ldp x7,x9,[x1],#8*2 umulh x11,x11,x11 mul x12,x13,x13 umulh x13,x13,x13 extr x17,x14,x17,#63 stp x19,x20,[x2,#8*0] adcs x23,x10,x17 extr x14,x15,x14,#63 stp x21,x22,[x2,#8*2] adcs x24,x11,x14 ldp x17,x14,[x2,#8*7] extr x15,x16,x15,#63 adcs x25,x12,x15 extr x16,x17,x16,#63 adcs x26,x13,x16 ldp x15,x16,[x2,#8*9] mul x6,x7,x7 ldp x11,x13,[x1],#8*2 umulh x7,x7,x7 mul x8,x9,x9 umulh x9,x9,x9 stp x23,x24,[x2,#8*4] extr x17,x14,x17,#63 stp x25,x26,[x2,#8*6] add x2,x2,#8*8 adcs x19,x6,x17 extr x14,x15,x14,#63 adcs x20,x7,x14 ldp x17,x14,[x2,#8*3] extr x15,x16,x15,#63 cbnz x27,Lsqr4x_shift_n_add ldp x1,x4,[x29,#104] // pull np and n0 adcs x21,x8,x15 extr x16,x17,x16,#63 adcs x22,x9,x16 ldp x15,x16,[x2,#8*5] mul x10,x11,x11 umulh x11,x11,x11 stp x19,x20,[x2,#8*0] mul x12,x13,x13 umulh x13,x13,x13 stp x21,x22,[x2,#8*2] extr x17,x14,x17,#63 adcs x23,x10,x17 extr x14,x15,x14,#63 ldp x19,x20,[sp,#8*0] adcs x24,x11,x14 extr x15,x16,x15,#63 ldp x6,x7,[x1,#8*0] adcs x25,x12,x15 extr x16,xzr,x16,#63 ldp x8,x9,[x1,#8*2] adc x26,x13,x16 ldp x10,x11,[x1,#8*4] // Reduce by 512 bits per iteration mul x28,x4,x19 // t[0]*n0 ldp x12,x13,[x1,#8*6] add x3,x1,x5 ldp x21,x22,[sp,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[sp,#8*4] stp x25,x26,[x2,#8*6] ldp x25,x26,[sp,#8*6] add x1,x1,#8*8 mov x30,xzr // initial top-most carry mov x2,sp mov x27,#8 Lsqr8x_reduction: // (*) mul x14,x6,x28 // lo(n[0-7])*lo(t[0]*n0) mul x15,x7,x28 sub x27,x27,#1 mul x16,x8,x28 str x28,[x2],#8 // put aside t[0]*n0 for tail processing mul x17,x9,x28 // (*) adds xzr,x19,x14 subs xzr,x19,#1 // (*) mul x14,x10,x28 adcs x19,x20,x15 mul x15,x11,x28 adcs x20,x21,x16 mul x16,x12,x28 adcs x21,x22,x17 mul x17,x13,x28 adcs x22,x23,x14 umulh x14,x6,x28 // hi(n[0-7])*lo(t[0]*n0) adcs x23,x24,x15 umulh x15,x7,x28 adcs x24,x25,x16 umulh x16,x8,x28 adcs x25,x26,x17 umulh x17,x9,x28 adc x26,xzr,xzr adds x19,x19,x14 umulh x14,x10,x28 adcs x20,x20,x15 umulh x15,x11,x28 adcs x21,x21,x16 umulh x16,x12,x28 adcs x22,x22,x17 umulh x17,x13,x28 mul x28,x4,x19 // next t[0]*n0 adcs x23,x23,x14 adcs x24,x24,x15 adcs x25,x25,x16 adc x26,x26,x17 cbnz x27,Lsqr8x_reduction ldp x14,x15,[x2,#8*0] ldp x16,x17,[x2,#8*2] mov x0,x2 sub x27,x3,x1 // done yet? 
adds x19,x19,x14 adcs x20,x20,x15 ldp x14,x15,[x2,#8*4] adcs x21,x21,x16 adcs x22,x22,x17 ldp x16,x17,[x2,#8*6] adcs x23,x23,x14 adcs x24,x24,x15 adcs x25,x25,x16 adcs x26,x26,x17 //adc x28,xzr,xzr // moved below cbz x27,Lsqr8x8_post_condition ldr x4,[x2,#-8*8] ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] ldp x10,x11,[x1,#8*4] mov x27,#-8*8 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 Lsqr8x_tail: mul x14,x6,x4 adc x28,xzr,xzr // carry bit, modulo-scheduled mul x15,x7,x4 add x27,x27,#8 mul x16,x8,x4 mul x17,x9,x4 adds x19,x19,x14 mul x14,x10,x4 adcs x20,x20,x15 mul x15,x11,x4 adcs x21,x21,x16 mul x16,x12,x4 adcs x22,x22,x17 mul x17,x13,x4 adcs x23,x23,x14 umulh x14,x6,x4 adcs x24,x24,x15 umulh x15,x7,x4 adcs x25,x25,x16 umulh x16,x8,x4 adcs x26,x26,x17 umulh x17,x9,x4 adc x28,x28,xzr str x19,[x2],#8 adds x19,x20,x14 umulh x14,x10,x4 adcs x20,x21,x15 umulh x15,x11,x4 adcs x21,x22,x16 umulh x16,x12,x4 adcs x22,x23,x17 umulh x17,x13,x4 ldr x4,[x0,x27] adcs x23,x24,x14 adcs x24,x25,x15 adcs x25,x26,x16 adcs x26,x28,x17 //adc x28,xzr,xzr // moved above cbnz x27,Lsqr8x_tail // note that carry flag is guaranteed // to be zero at this point ldp x6,x7,[x2,#8*0] sub x27,x3,x1 // done yet? sub x16,x3,x5 // rewinded np ldp x8,x9,[x2,#8*2] ldp x10,x11,[x2,#8*4] ldp x12,x13,[x2,#8*6] cbz x27,Lsqr8x_tail_break ldr x4,[x0,#-8*8] adds x19,x19,x6 adcs x20,x20,x7 ldp x6,x7,[x1,#8*0] adcs x21,x21,x8 adcs x22,x22,x9 ldp x8,x9,[x1,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x1,#8*4] adcs x25,x25,x12 mov x27,#-8*8 adcs x26,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 //adc x28,xzr,xzr // moved above b Lsqr8x_tail .align 4 Lsqr8x_tail_break: ldr x4,[x29,#112] // pull n0 add x27,x2,#8*8 // end of current t[num] window subs xzr,x30,#1 // "move" top-most carry to carry bit adcs x14,x19,x6 adcs x15,x20,x7 ldp x19,x20,[x0,#8*0] adcs x21,x21,x8 ldp x6,x7,[x16,#8*0] // recall that x16 is &n[0] adcs x22,x22,x9 ldp x8,x9,[x16,#8*2] adcs x23,x23,x10 adcs x24,x24,x11 ldp x10,x11,[x16,#8*4] adcs x25,x25,x12 adcs x26,x26,x13 ldp x12,x13,[x16,#8*6] add x1,x16,#8*8 adc x30,xzr,xzr // top-most carry mul x28,x4,x19 stp x14,x15,[x2,#8*0] stp x21,x22,[x2,#8*2] ldp x21,x22,[x0,#8*2] stp x23,x24,[x2,#8*4] ldp x23,x24,[x0,#8*4] cmp x27,x29 // did we hit the bottom? stp x25,x26,[x2,#8*6] mov x2,x0 // slide the window ldp x25,x26,[x0,#8*6] mov x27,#8 b.ne Lsqr8x_reduction // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. ldr x0,[x29,#96] // pull rp add x2,x2,#8*8 subs x14,x19,x6 sbcs x15,x20,x7 sub x27,x5,#8*8 mov x3,x0 // x0 copy Lsqr8x_sub: sbcs x16,x21,x8 ldp x6,x7,[x1,#8*0] sbcs x17,x22,x9 stp x14,x15,[x0,#8*0] sbcs x14,x23,x10 ldp x8,x9,[x1,#8*2] sbcs x15,x24,x11 stp x16,x17,[x0,#8*2] sbcs x16,x25,x12 ldp x10,x11,[x1,#8*4] sbcs x17,x26,x13 ldp x12,x13,[x1,#8*6] add x1,x1,#8*8 ldp x19,x20,[x2,#8*0] sub x27,x27,#8*8 ldp x21,x22,[x2,#8*2] ldp x23,x24,[x2,#8*4] ldp x25,x26,[x2,#8*6] add x2,x2,#8*8 stp x14,x15,[x0,#8*4] sbcs x14,x19,x6 stp x16,x17,[x0,#8*6] add x0,x0,#8*8 sbcs x15,x20,x7 cbnz x27,Lsqr8x_sub sbcs x16,x21,x8 mov x2,sp add x1,sp,x5 ldp x6,x7,[x3,#8*0] sbcs x17,x22,x9 stp x14,x15,[x0,#8*0] sbcs x14,x23,x10 ldp x8,x9,[x3,#8*2] sbcs x15,x24,x11 stp x16,x17,[x0,#8*2] sbcs x16,x25,x12 ldp x19,x20,[x1,#8*0] sbcs x17,x26,x13 ldp x21,x22,[x1,#8*2] sbcs xzr,x30,xzr // did it borrow? 
ldr x30,[x29,#8] // pull return address stp x14,x15,[x0,#8*4] stp x16,x17,[x0,#8*6] sub x27,x5,#8*4 Lsqr4x_cond_copy: sub x27,x27,#8*4 csel x14,x19,x6,lo stp xzr,xzr,[x2,#8*0] csel x15,x20,x7,lo ldp x6,x7,[x3,#8*4] ldp x19,x20,[x1,#8*4] csel x16,x21,x8,lo stp xzr,xzr,[x2,#8*2] add x2,x2,#8*4 csel x17,x22,x9,lo ldp x8,x9,[x3,#8*6] ldp x21,x22,[x1,#8*6] add x1,x1,#8*4 stp x14,x15,[x3,#8*0] stp x16,x17,[x3,#8*2] add x3,x3,#8*4 stp xzr,xzr,[x1,#8*0] stp xzr,xzr,[x1,#8*2] cbnz x27,Lsqr4x_cond_copy csel x14,x19,x6,lo stp xzr,xzr,[x2,#8*0] csel x15,x20,x7,lo stp xzr,xzr,[x2,#8*2] csel x16,x21,x8,lo csel x17,x22,x9,lo stp x14,x15,[x3,#8*0] stp x16,x17,[x3,#8*2] b Lsqr8x_done .align 4 Lsqr8x8_post_condition: adc x28,xzr,xzr ldr x30,[x29,#8] // pull return address // x19-7,x28 hold result, x6-7 hold modulus subs x6,x19,x6 ldr x1,[x29,#96] // pull rp sbcs x7,x20,x7 stp xzr,xzr,[sp,#8*0] sbcs x8,x21,x8 stp xzr,xzr,[sp,#8*2] sbcs x9,x22,x9 stp xzr,xzr,[sp,#8*4] sbcs x10,x23,x10 stp xzr,xzr,[sp,#8*6] sbcs x11,x24,x11 stp xzr,xzr,[sp,#8*8] sbcs x12,x25,x12 stp xzr,xzr,[sp,#8*10] sbcs x13,x26,x13 stp xzr,xzr,[sp,#8*12] sbcs x28,x28,xzr // did it borrow? stp xzr,xzr,[sp,#8*14] // x6-7 hold result-modulus csel x6,x19,x6,lo csel x7,x20,x7,lo csel x8,x21,x8,lo csel x9,x22,x9,lo stp x6,x7,[x1,#8*0] csel x10,x23,x10,lo csel x11,x24,x11,lo stp x8,x9,[x1,#8*2] csel x12,x25,x12,lo csel x13,x26,x13,lo stp x10,x11,[x1,#8*4] stp x12,x13,[x1,#8*6] Lsqr8x_done: ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldr x29,[sp],#128 // x30 is popped earlier AARCH64_VALIDATE_LINK_REGISTER ret .align 5 __bn_mul4x_mont: // Not adding AARCH64_SIGN_LINK_REGISTER here because __bn_mul4x_mont is jumped to // only from bn_mul_mont or __bn_mul8x_mont which have already signed the // return address. stp x29,x30,[sp,#-128]! 
add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub x26,sp,x5,lsl#3 lsl x5,x5,#3 ldr x4,[x4] // *n0 sub sp,x26,#8*4 // alloca add x10,x2,x5 add x27,x1,x5 stp x0,x10,[x29,#96] // offload rp and &b[num] ldr x24,[x2,#8*0] // b[0] ldp x6,x7,[x1,#8*0] // a[0..3] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 mov x19,xzr mov x20,xzr mov x21,xzr mov x22,xzr ldp x14,x15,[x3,#8*0] // n[0..3] ldp x16,x17,[x3,#8*2] adds x3,x3,#8*4 // clear carry bit mov x0,xzr mov x28,#0 mov x26,sp Loop_mul4x_1st_reduction: mul x10,x6,x24 // lo(a[0..3]*b[0]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[0..3]*b[0]) adcs x20,x20,x11 mul x25,x19,x4 // t[0]*n0 adcs x21,x21,x12 umulh x11,x7,x24 adcs x22,x22,x13 umulh x12,x8,x24 adc x23,xzr,xzr umulh x13,x9,x24 ldr x24,[x2,x28] // next b[i] (or b[0]) adds x20,x20,x10 // (*) mul x10,x14,x25 // lo(n[0..3]*t[0]*n0) str x25,[x26],#8 // put aside t[0]*n0 for tail processing adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 // (*) adds xzr,x19,x10 subs xzr,x19,#1 // (*) umulh x10,x14,x25 // hi(n[0..3]*t[0]*n0) adcs x19,x20,x11 umulh x11,x15,x25 adcs x20,x21,x12 umulh x12,x16,x25 adcs x21,x22,x13 umulh x13,x17,x25 adcs x22,x23,x0 adc x0,xzr,xzr adds x19,x19,x10 sub x10,x27,x1 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr cbnz x28,Loop_mul4x_1st_reduction cbz x10,Lmul4x4_post_condition ldp x6,x7,[x1,#8*0] // a[4..7] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 ldr x25,[sp] // a[0]*n0 ldp x14,x15,[x3,#8*0] // n[4..7] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 Loop_mul4x_1st_tail: mul x10,x6,x24 // lo(a[4..7]*b[i]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[4..7]*b[i]) adcs x20,x20,x11 umulh x11,x7,x24 adcs x21,x21,x12 umulh x12,x8,x24 adcs x22,x22,x13 umulh x13,x9,x24 adc x23,xzr,xzr ldr x24,[x2,x28] // next b[i] (or b[0]) adds x20,x20,x10 mul x10,x14,x25 // lo(n[4..7]*a[0]*n0) adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 adds x19,x19,x10 umulh x10,x14,x25 // hi(n[4..7]*a[0]*n0) adcs x20,x20,x11 umulh x11,x15,x25 adcs x21,x21,x12 umulh x12,x16,x25 adcs x22,x22,x13 adcs x23,x23,x0 umulh x13,x17,x25 adc x0,xzr,xzr ldr x25,[sp,x28] // next t[0]*n0 str x19,[x26],#8 // result!!! adds x19,x20,x10 sub x10,x27,x1 // done yet? adcs x20,x21,x11 adcs x21,x22,x12 adcs x22,x23,x13 //adc x0,x0,xzr cbnz x28,Loop_mul4x_1st_tail sub x11,x27,x5 // rewinded x1 cbz x10,Lmul4x_proceed ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 ldp x14,x15,[x3,#8*0] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 b Loop_mul4x_1st_tail .align 5 Lmul4x_proceed: ldr x24,[x2,#8*4]! // *++b adc x30,x0,xzr ldp x6,x7,[x11,#8*0] // a[0..3] sub x3,x3,x5 // rewind np ldp x8,x9,[x11,#8*2] add x1,x11,#8*4 stp x19,x20,[x26,#8*0] // result!!! ldp x19,x20,[sp,#8*4] // t[0..3] stp x21,x22,[x26,#8*2] // result!!! 
ldp x21,x22,[sp,#8*6] ldp x14,x15,[x3,#8*0] // n[0..3] mov x26,sp ldp x16,x17,[x3,#8*2] adds x3,x3,#8*4 // clear carry bit mov x0,xzr .align 4 Loop_mul4x_reduction: mul x10,x6,x24 // lo(a[0..3]*b[4]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[0..3]*b[4]) adcs x20,x20,x11 mul x25,x19,x4 // t[0]*n0 adcs x21,x21,x12 umulh x11,x7,x24 adcs x22,x22,x13 umulh x12,x8,x24 adc x23,xzr,xzr umulh x13,x9,x24 ldr x24,[x2,x28] // next b[i] adds x20,x20,x10 // (*) mul x10,x14,x25 str x25,[x26],#8 // put aside t[0]*n0 for tail processing adcs x21,x21,x11 mul x11,x15,x25 // lo(n[0..3]*t[0]*n0 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 // (*) adds xzr,x19,x10 subs xzr,x19,#1 // (*) umulh x10,x14,x25 // hi(n[0..3]*t[0]*n0 adcs x19,x20,x11 umulh x11,x15,x25 adcs x20,x21,x12 umulh x12,x16,x25 adcs x21,x22,x13 umulh x13,x17,x25 adcs x22,x23,x0 adc x0,xzr,xzr adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr cbnz x28,Loop_mul4x_reduction adc x0,x0,xzr ldp x10,x11,[x26,#8*4] // t[4..7] ldp x12,x13,[x26,#8*6] ldp x6,x7,[x1,#8*0] // a[4..7] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr ldr x25,[sp] // t[0]*n0 ldp x14,x15,[x3,#8*0] // n[4..7] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 .align 4 Loop_mul4x_tail: mul x10,x6,x24 // lo(a[4..7]*b[4]) adc x0,x0,xzr // modulo-scheduled mul x11,x7,x24 add x28,x28,#8 mul x12,x8,x24 and x28,x28,#31 mul x13,x9,x24 adds x19,x19,x10 umulh x10,x6,x24 // hi(a[4..7]*b[4]) adcs x20,x20,x11 umulh x11,x7,x24 adcs x21,x21,x12 umulh x12,x8,x24 adcs x22,x22,x13 umulh x13,x9,x24 adc x23,xzr,xzr ldr x24,[x2,x28] // next b[i] adds x20,x20,x10 mul x10,x14,x25 // lo(n[4..7]*t[0]*n0) adcs x21,x21,x11 mul x11,x15,x25 adcs x22,x22,x12 mul x12,x16,x25 adc x23,x23,x13 // can't overflow mul x13,x17,x25 adds x19,x19,x10 umulh x10,x14,x25 // hi(n[4..7]*t[0]*n0) adcs x20,x20,x11 umulh x11,x15,x25 adcs x21,x21,x12 umulh x12,x16,x25 adcs x22,x22,x13 umulh x13,x17,x25 adcs x23,x23,x0 ldr x25,[sp,x28] // next a[0]*n0 adc x0,xzr,xzr str x19,[x26],#8 // result!!! adds x19,x20,x10 sub x10,x27,x1 // done yet? adcs x20,x21,x11 adcs x21,x22,x12 adcs x22,x23,x13 //adc x0,x0,xzr cbnz x28,Loop_mul4x_tail sub x11,x3,x5 // rewinded np? adc x0,x0,xzr cbz x10,Loop_mul4x_break ldp x10,x11,[x26,#8*4] ldp x12,x13,[x26,#8*6] ldp x6,x7,[x1,#8*0] ldp x8,x9,[x1,#8*2] add x1,x1,#8*4 adds x19,x19,x10 adcs x20,x20,x11 adcs x21,x21,x12 adcs x22,x22,x13 //adc x0,x0,xzr ldp x14,x15,[x3,#8*0] ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 b Loop_mul4x_tail .align 4 Loop_mul4x_break: ldp x12,x13,[x29,#96] // pull rp and &b[num] adds x19,x19,x30 add x2,x2,#8*4 // bp++ adcs x20,x20,xzr sub x1,x1,x5 // rewind ap adcs x21,x21,xzr stp x19,x20,[x26,#8*0] // result!!! adcs x22,x22,xzr ldp x19,x20,[sp,#8*4] // t[0..3] adc x30,x0,xzr stp x21,x22,[x26,#8*2] // result!!! cmp x2,x13 // done yet? ldp x21,x22,[sp,#8*6] ldp x14,x15,[x11,#8*0] // n[0..3] ldp x16,x17,[x11,#8*2] add x3,x11,#8*4 b.eq Lmul4x_post ldr x24,[x2] ldp x6,x7,[x1,#8*0] // a[0..3] ldp x8,x9,[x1,#8*2] adds x1,x1,#8*4 // clear carry bit mov x0,xzr mov x26,sp b Loop_mul4x_reduction .align 4 Lmul4x_post: // Final step. We see if result is larger than modulus, and // if it is, subtract the modulus. But comparison implies // subtraction. So we subtract modulus, see if it borrowed, // and conditionally copy original value. 
mov x0,x12 mov x27,x12 // x0 copy subs x10,x19,x14 add x26,sp,#8*8 sbcs x11,x20,x15 sub x28,x5,#8*4 Lmul4x_sub: sbcs x12,x21,x16 ldp x14,x15,[x3,#8*0] sub x28,x28,#8*4 ldp x19,x20,[x26,#8*0] sbcs x13,x22,x17 ldp x16,x17,[x3,#8*2] add x3,x3,#8*4 ldp x21,x22,[x26,#8*2] add x26,x26,#8*4 stp x10,x11,[x0,#8*0] sbcs x10,x19,x14 stp x12,x13,[x0,#8*2] add x0,x0,#8*4 sbcs x11,x20,x15 cbnz x28,Lmul4x_sub sbcs x12,x21,x16 mov x26,sp add x1,sp,#8*4 ldp x6,x7,[x27,#8*0] sbcs x13,x22,x17 stp x10,x11,[x0,#8*0] ldp x8,x9,[x27,#8*2] stp x12,x13,[x0,#8*2] ldp x19,x20,[x1,#8*0] ldp x21,x22,[x1,#8*2] sbcs xzr,x30,xzr // did it borrow? ldr x30,[x29,#8] // pull return address sub x28,x5,#8*4 Lmul4x_cond_copy: sub x28,x28,#8*4 csel x10,x19,x6,lo stp xzr,xzr,[x26,#8*0] csel x11,x20,x7,lo ldp x6,x7,[x27,#8*4] ldp x19,x20,[x1,#8*4] csel x12,x21,x8,lo stp xzr,xzr,[x26,#8*2] add x26,x26,#8*4 csel x13,x22,x9,lo ldp x8,x9,[x27,#8*6] ldp x21,x22,[x1,#8*6] add x1,x1,#8*4 stp x10,x11,[x27,#8*0] stp x12,x13,[x27,#8*2] add x27,x27,#8*4 cbnz x28,Lmul4x_cond_copy csel x10,x19,x6,lo stp xzr,xzr,[x26,#8*0] csel x11,x20,x7,lo stp xzr,xzr,[x26,#8*2] csel x12,x21,x8,lo stp xzr,xzr,[x26,#8*3] csel x13,x22,x9,lo stp xzr,xzr,[x26,#8*4] stp x10,x11,[x27,#8*0] stp x12,x13,[x27,#8*2] b Lmul4x_done .align 4 Lmul4x4_post_condition: adc x0,x0,xzr ldr x1,[x29,#96] // pull rp // x19-3,x0 hold result, x14-7 hold modulus subs x6,x19,x14 ldr x30,[x29,#8] // pull return address sbcs x7,x20,x15 stp xzr,xzr,[sp,#8*0] sbcs x8,x21,x16 stp xzr,xzr,[sp,#8*2] sbcs x9,x22,x17 stp xzr,xzr,[sp,#8*4] sbcs xzr,x0,xzr // did it borrow? stp xzr,xzr,[sp,#8*6] // x6-3 hold result-modulus csel x6,x19,x6,lo csel x7,x20,x7,lo csel x8,x21,x8,lo csel x9,x22,x9,lo stp x6,x7,[x1,#8*0] stp x8,x9,[x1,#8*2] Lmul4x_done: ldp x19,x20,[x29,#16] mov sp,x29 ldp x21,x22,[x29,#32] mov x0,#1 ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldr x29,[sp],#128 // x30 is popped earlier AARCH64_VALIDATE_LINK_REGISTER ret .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 4 #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
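The "Final step" comments in bn_mul_mont, __bn_sqr8x_mont and __bn_mul4x_mont all describe the same branch-free reduction: the tentative result t needs at most one subtraction of the modulus n, so the code subtracts n unconditionally, folds the top-most carry into the borrow test ("sbcs ...,xzr // did it borrow?"), and then uses csel ... lo to keep either t or t - n. A rough C sketch of that tail, assuming 64-bit limbs; mont_final_reduce is an illustrative helper name, not an aws-lc function:

#include <stddef.h>
#include <stdint.h>

typedef uint64_t BN_ULONG;  /* assumption: 64-bit limbs */

/* Given t < 2*n (num limbs each) plus its top carry bit, write the fully
 * reduced result into rp without a data-dependent branch. */
static void mont_final_reduce(BN_ULONG *rp, const BN_ULONG *t,
                              const BN_ULONG *n, BN_ULONG top_carry,
                              size_t num) {
  BN_ULONG borrow = 0;
  for (size_t i = 0; i < num; i++) {  /* rp <- t - n, as in the Lsub loops */
    BN_ULONG d = t[i] - n[i];
    BN_ULONG b = (t[i] < n[i]);
    rp[i] = d - borrow;
    borrow = b | (d < borrow);
  }
  /* Underflow iff the borrow exceeds the top carry; this is the asm's
   * final sbcs against xzr followed by the "lo" condition. */
  borrow = (top_carry < borrow);
  BN_ULONG mask = 0 - borrow;         /* all-ones when t was already reduced */
  for (size_t i = 0; i < num; i++) {  /* the csel / cond-copy loops */
    rp[i] = (t[i] & mask) | (rp[i] & ~mask);
  }
}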
marvin-hansen/iggy-streaming-system
40,214
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/chacha/chacha-armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .section __TEXT,__const .align 5 Lsigma: .quad 0x3320646e61707865,0x6b20657479622d32 // endian-neutral Lone: .long 1,0,0,0 .byte 67,104,97,67,104,97,50,48,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .text .globl _ChaCha20_ctr32_nohw .private_extern _ChaCha20_ctr32_nohw .align 5 _ChaCha20_ctr32_nohw: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,Lsigma@PAGE add x5,x5,Lsigma@PAGEOFF stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#64 ldp x22,x23,[x5] // load sigma ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ldp x28,x30,[x4] // load counter #ifdef __AARCH64EB__ ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif Loop_outer: mov w5,w22 // unpack key block lsr x6,x22,#32 mov w7,w23 lsr x8,x23,#32 mov w9,w24 lsr x10,x24,#32 mov w11,w25 lsr x12,x25,#32 mov w13,w26 lsr x14,x26,#32 mov w15,w27 lsr x16,x27,#32 mov w17,w28 lsr x19,x28,#32 mov w20,w30 lsr x21,x30,#32 mov x4,#10 subs x2,x2,#64 Loop: sub x4,x4,#1 add w5,w5,w9 add w6,w6,w10 add w7,w7,w11 add w8,w8,w12 eor w17,w17,w5 eor w19,w19,w6 eor w20,w20,w7 eor w21,w21,w8 ror w17,w17,#16 ror w19,w19,#16 ror w20,w20,#16 ror w21,w21,#16 add w13,w13,w17 add w14,w14,w19 add w15,w15,w20 add w16,w16,w21 eor w9,w9,w13 eor w10,w10,w14 eor w11,w11,w15 eor w12,w12,w16 ror w9,w9,#20 ror w10,w10,#20 ror w11,w11,#20 ror w12,w12,#20 add w5,w5,w9 add w6,w6,w10 add w7,w7,w11 add w8,w8,w12 eor w17,w17,w5 eor w19,w19,w6 eor w20,w20,w7 eor w21,w21,w8 ror w17,w17,#24 ror w19,w19,#24 ror w20,w20,#24 ror w21,w21,#24 add w13,w13,w17 add w14,w14,w19 add w15,w15,w20 add w16,w16,w21 eor w9,w9,w13 eor w10,w10,w14 eor w11,w11,w15 eor w12,w12,w16 ror w9,w9,#25 ror w10,w10,#25 ror w11,w11,#25 ror w12,w12,#25 add w5,w5,w10 add w6,w6,w11 add w7,w7,w12 add w8,w8,w9 eor w21,w21,w5 eor w17,w17,w6 eor w19,w19,w7 eor w20,w20,w8 ror w21,w21,#16 ror w17,w17,#16 ror w19,w19,#16 ror w20,w20,#16 add w15,w15,w21 add w16,w16,w17 add w13,w13,w19 add w14,w14,w20 eor w10,w10,w15 eor w11,w11,w16 eor w12,w12,w13 eor w9,w9,w14 ror w10,w10,#20 ror w11,w11,#20 ror w12,w12,#20 ror w9,w9,#20 add w5,w5,w10 add w6,w6,w11 add w7,w7,w12 add w8,w8,w9 eor w21,w21,w5 eor w17,w17,w6 eor w19,w19,w7 eor w20,w20,w8 ror w21,w21,#24 ror w17,w17,#24 ror w19,w19,#24 ror w20,w20,#24 add w15,w15,w21 add w16,w16,w17 add w13,w13,w19 add w14,w14,w20 eor w10,w10,w15 eor w11,w11,w16 eor w12,w12,w13 eor w9,w9,w14 ror w10,w10,#25 ror w11,w11,#25 ror w12,w12,#25 ror w9,w9,#25 cbnz x4,Loop add w5,w5,w22 // accumulate key block add x6,x6,x22,lsr#32 add w7,w7,w23 add x8,x8,x23,lsr#32 add w9,w9,w24 add x10,x10,x24,lsr#32 add w11,w11,w25 add x12,x12,x25,lsr#32 add w13,w13,w26 add x14,x14,x26,lsr#32 add w15,w15,w27 add x16,x16,x27,lsr#32 add w17,w17,w28 add x19,x19,x28,lsr#32 add w20,w20,w30 add x21,x21,x30,lsr#32 b.lo Ltail add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 
#ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#1 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 b.hi Loop_outer ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .align 4 Ltail: add x2,x2,#64 Less_than_64: sub x0,x0,#1 add x1,x1,x2 add x0,x0,x2 add x4,sp,x2 neg x2,x2 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif stp x5,x7,[sp,#0] stp x9,x11,[sp,#16] stp x13,x15,[sp,#32] stp x17,x20,[sp,#48] Loop_tail: ldrb w10,[x1,x2] ldrb w11,[x4,x2] add x2,x2,#1 eor w10,w10,w11 strb w10,[x0,x2] cbnz x2,Loop_tail stp xzr,xzr,[sp,#0] stp xzr,xzr,[sp,#16] stp xzr,xzr,[sp,#32] stp xzr,xzr,[sp,#48] ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .globl _ChaCha20_ctr32_neon .private_extern _ChaCha20_ctr32_neon .align 5 _ChaCha20_ctr32_neon: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,Lsigma@PAGE add x5,x5,Lsigma@PAGEOFF stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] cmp x2,#512 b.hs L512_or_more_neon sub sp,sp,#64 ldp x22,x23,[x5] // load sigma ld1 {v24.4s},[x5],#16 ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ld1 {v25.4s,v26.4s},[x3] ldp x28,x30,[x4] // load counter ld1 {v27.4s},[x4] ld1 {v31.4s},[x5] #ifdef __AARCH64EB__ rev64 v24.4s,v24.4s ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif add v27.4s,v27.4s,v31.4s // += 1 add v28.4s,v27.4s,v31.4s add v29.4s,v28.4s,v31.4s shl v31.4s,v31.4s,#2 // 1 -> 4 Loop_outer_neon: mov w5,w22 // unpack key block lsr x6,x22,#32 mov v0.16b,v24.16b mov w7,w23 lsr x8,x23,#32 mov v4.16b,v24.16b mov w9,w24 lsr x10,x24,#32 mov v16.16b,v24.16b mov w11,w25 mov v1.16b,v25.16b lsr x12,x25,#32 mov v5.16b,v25.16b mov w13,w26 mov v17.16b,v25.16b lsr x14,x26,#32 mov v3.16b,v27.16b mov w15,w27 mov v7.16b,v28.16b lsr x16,x27,#32 mov v19.16b,v29.16b mov w17,w28 mov v2.16b,v26.16b lsr x19,x28,#32 mov v6.16b,v26.16b mov w20,w30 mov v18.16b,v26.16b lsr x21,x30,#32 mov x4,#10 subs x2,x2,#256 Loop_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v16.4s,v16.4s,v17.4s add w7,w7,w11 eor v3.16b,v3.16b,v0.16b add w8,w8,w12 eor v7.16b,v7.16b,v4.16b eor w17,w17,w5 eor v19.16b,v19.16b,v16.16b eor w19,w19,w6 rev32 v3.8h,v3.8h eor w20,w20,w7 rev32 v7.8h,v7.8h eor w21,w21,w8 rev32 v19.8h,v19.8h ror w17,w17,#16 add v2.4s,v2.4s,v3.4s ror w19,w19,#16 add v6.4s,v6.4s,v7.4s ror w20,w20,#16 add v18.4s,v18.4s,v19.4s ror w21,w21,#16 eor v20.16b,v1.16b,v2.16b add w13,w13,w17 eor v21.16b,v5.16b,v6.16b add w14,w14,w19 eor v22.16b,v17.16b,v18.16b add w15,w15,w20 ushr v1.4s,v20.4s,#20 add w16,w16,w21 ushr v5.4s,v21.4s,#20 eor w9,w9,w13 ushr v17.4s,v22.4s,#20 eor w10,w10,w14 sli v1.4s,v20.4s,#12 eor w11,w11,w15 sli v5.4s,v21.4s,#12 eor 
w12,w12,w16 sli v17.4s,v22.4s,#12 ror w9,w9,#20 add v0.4s,v0.4s,v1.4s ror w10,w10,#20 add v4.4s,v4.4s,v5.4s ror w11,w11,#20 add v16.4s,v16.4s,v17.4s ror w12,w12,#20 eor v20.16b,v3.16b,v0.16b add w5,w5,w9 eor v21.16b,v7.16b,v4.16b add w6,w6,w10 eor v22.16b,v19.16b,v16.16b add w7,w7,w11 ushr v3.4s,v20.4s,#24 add w8,w8,w12 ushr v7.4s,v21.4s,#24 eor w17,w17,w5 ushr v19.4s,v22.4s,#24 eor w19,w19,w6 sli v3.4s,v20.4s,#8 eor w20,w20,w7 sli v7.4s,v21.4s,#8 eor w21,w21,w8 sli v19.4s,v22.4s,#8 ror w17,w17,#24 add v2.4s,v2.4s,v3.4s ror w19,w19,#24 add v6.4s,v6.4s,v7.4s ror w20,w20,#24 add v18.4s,v18.4s,v19.4s ror w21,w21,#24 eor v20.16b,v1.16b,v2.16b add w13,w13,w17 eor v21.16b,v5.16b,v6.16b add w14,w14,w19 eor v22.16b,v17.16b,v18.16b add w15,w15,w20 ushr v1.4s,v20.4s,#25 add w16,w16,w21 ushr v5.4s,v21.4s,#25 eor w9,w9,w13 ushr v17.4s,v22.4s,#25 eor w10,w10,w14 sli v1.4s,v20.4s,#7 eor w11,w11,w15 sli v5.4s,v21.4s,#7 eor w12,w12,w16 sli v17.4s,v22.4s,#7 ror w9,w9,#25 ext v2.16b,v2.16b,v2.16b,#8 ror w10,w10,#25 ext v6.16b,v6.16b,v6.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w10 add v4.4s,v4.4s,v5.4s add w6,w6,w11 add v16.4s,v16.4s,v17.4s add w7,w7,w12 eor v3.16b,v3.16b,v0.16b add w8,w8,w9 eor v7.16b,v7.16b,v4.16b eor w21,w21,w5 eor v19.16b,v19.16b,v16.16b eor w17,w17,w6 rev32 v3.8h,v3.8h eor w19,w19,w7 rev32 v7.8h,v7.8h eor w20,w20,w8 rev32 v19.8h,v19.8h ror w21,w21,#16 add v2.4s,v2.4s,v3.4s ror w17,w17,#16 add v6.4s,v6.4s,v7.4s ror w19,w19,#16 add v18.4s,v18.4s,v19.4s ror w20,w20,#16 eor v20.16b,v1.16b,v2.16b add w15,w15,w21 eor v21.16b,v5.16b,v6.16b add w16,w16,w17 eor v22.16b,v17.16b,v18.16b add w13,w13,w19 ushr v1.4s,v20.4s,#20 add w14,w14,w20 ushr v5.4s,v21.4s,#20 eor w10,w10,w15 ushr v17.4s,v22.4s,#20 eor w11,w11,w16 sli v1.4s,v20.4s,#12 eor w12,w12,w13 sli v5.4s,v21.4s,#12 eor w9,w9,w14 sli v17.4s,v22.4s,#12 ror w10,w10,#20 add v0.4s,v0.4s,v1.4s ror w11,w11,#20 add v4.4s,v4.4s,v5.4s ror w12,w12,#20 add v16.4s,v16.4s,v17.4s ror w9,w9,#20 eor v20.16b,v3.16b,v0.16b add w5,w5,w10 eor v21.16b,v7.16b,v4.16b add w6,w6,w11 eor v22.16b,v19.16b,v16.16b add w7,w7,w12 ushr v3.4s,v20.4s,#24 add w8,w8,w9 ushr v7.4s,v21.4s,#24 eor w21,w21,w5 ushr v19.4s,v22.4s,#24 eor w17,w17,w6 sli v3.4s,v20.4s,#8 eor w19,w19,w7 sli v7.4s,v21.4s,#8 eor w20,w20,w8 sli v19.4s,v22.4s,#8 ror w21,w21,#24 add v2.4s,v2.4s,v3.4s ror w17,w17,#24 add v6.4s,v6.4s,v7.4s ror w19,w19,#24 add v18.4s,v18.4s,v19.4s ror w20,w20,#24 eor v20.16b,v1.16b,v2.16b add w15,w15,w21 eor v21.16b,v5.16b,v6.16b add w16,w16,w17 eor v22.16b,v17.16b,v18.16b add w13,w13,w19 ushr v1.4s,v20.4s,#25 add w14,w14,w20 ushr v5.4s,v21.4s,#25 eor w10,w10,w15 ushr v17.4s,v22.4s,#25 eor w11,w11,w16 sli v1.4s,v20.4s,#7 eor w12,w12,w13 sli v5.4s,v21.4s,#7 eor w9,w9,w14 sli v17.4s,v22.4s,#7 ror w10,w10,#25 ext v2.16b,v2.16b,v2.16b,#8 ror w11,w11,#25 ext v6.16b,v6.16b,v6.16b,#8 ror w12,w12,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v17.16b,v17.16b,v17.16b,#12 cbnz x4,Loop_neon add w5,w5,w22 // accumulate key block add v0.4s,v0.4s,v24.4s add x6,x6,x22,lsr#32 add v4.4s,v4.4s,v24.4s add w7,w7,w23 add v16.4s,v16.4s,v24.4s add x8,x8,x23,lsr#32 add v2.4s,v2.4s,v26.4s add w9,w9,w24 add 
v6.4s,v6.4s,v26.4s add x10,x10,x24,lsr#32 add v18.4s,v18.4s,v26.4s add w11,w11,w25 add v3.4s,v3.4s,v27.4s add x12,x12,x25,lsr#32 add w13,w13,w26 add v7.4s,v7.4s,v28.4s add x14,x14,x26,lsr#32 add w15,w15,w27 add v19.4s,v19.4s,v29.4s add x16,x16,x27,lsr#32 add w17,w17,w28 add v1.4s,v1.4s,v25.4s add x19,x19,x28,lsr#32 add w20,w20,w30 add v5.4s,v5.4s,v25.4s add x21,x21,x30,lsr#32 add v17.4s,v17.4s,v25.4s b.lo Ltail_neon add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor v0.16b,v0.16b,v20.16b eor x15,x15,x16 eor v1.16b,v1.16b,v21.16b eor x17,x17,x19 eor v2.16b,v2.16b,v22.16b eor x20,x20,x21 eor v3.16b,v3.16b,v23.16b ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 stp x5,x7,[x0,#0] // store output add x28,x28,#4 // increment counter stp x9,x11,[x0,#16] add v27.4s,v27.4s,v31.4s // += 4 stp x13,x15,[x0,#32] add v28.4s,v28.4s,v31.4s stp x17,x20,[x0,#48] add v29.4s,v29.4s,v31.4s add x0,x0,#64 st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 ld1 {v0.16b,v1.16b,v2.16b,v3.16b},[x1],#64 eor v4.16b,v4.16b,v20.16b eor v5.16b,v5.16b,v21.16b eor v6.16b,v6.16b,v22.16b eor v7.16b,v7.16b,v23.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 eor v16.16b,v16.16b,v0.16b eor v17.16b,v17.16b,v1.16b eor v18.16b,v18.16b,v2.16b eor v19.16b,v19.16b,v3.16b st1 {v16.16b,v17.16b,v18.16b,v19.16b},[x0],#64 b.hi Loop_outer_neon ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret Ltail_neon: add x2,x2,#256 cmp x2,#64 b.lo Less_than_64 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#4 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 b.eq Ldone_neon sub x2,x2,#64 cmp x2,#64 b.lo Less_than_128 ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor v0.16b,v0.16b,v20.16b eor v1.16b,v1.16b,v21.16b eor v2.16b,v2.16b,v22.16b eor v3.16b,v3.16b,v23.16b st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 b.eq Ldone_neon sub x2,x2,#64 cmp x2,#64 b.lo Less_than_192 ld1 {v20.16b,v21.16b,v22.16b,v23.16b},[x1],#64 eor v4.16b,v4.16b,v20.16b eor v5.16b,v5.16b,v21.16b eor v6.16b,v6.16b,v22.16b eor v7.16b,v7.16b,v23.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 b.eq Ldone_neon sub x2,x2,#64 st1 {v16.16b,v17.16b,v18.16b,v19.16b},[sp] b Last_neon Less_than_128: st1 {v0.16b,v1.16b,v2.16b,v3.16b},[sp] b Last_neon Less_than_192: st1 {v4.16b,v5.16b,v6.16b,v7.16b},[sp] b Last_neon .align 4 Last_neon: sub x0,x0,#1 add x1,x1,x2 add x0,x0,x2 add x4,sp,x2 neg x2,x2 Loop_tail_neon: ldrb w10,[x1,x2] ldrb 
w11,[x4,x2] add x2,x2,#1 eor w10,w10,w11 strb w10,[x0,x2] cbnz x2,Loop_tail_neon stp xzr,xzr,[sp,#0] stp xzr,xzr,[sp,#16] stp xzr,xzr,[sp,#32] stp xzr,xzr,[sp,#48] Ldone_neon: ldp x19,x20,[x29,#16] add sp,sp,#64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret .align 5 ChaCha20_512_neon: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-96]! add x29,sp,#0 adrp x5,Lsigma@PAGE add x5,x5,Lsigma@PAGEOFF stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] L512_or_more_neon: sub sp,sp,#128+64 ldp x22,x23,[x5] // load sigma ld1 {v24.4s},[x5],#16 ldp x24,x25,[x3] // load key ldp x26,x27,[x3,#16] ld1 {v25.4s,v26.4s},[x3] ldp x28,x30,[x4] // load counter ld1 {v27.4s},[x4] ld1 {v31.4s},[x5] #ifdef __AARCH64EB__ rev64 v24.4s,v24.4s ror x24,x24,#32 ror x25,x25,#32 ror x26,x26,#32 ror x27,x27,#32 ror x28,x28,#32 ror x30,x30,#32 #endif add v27.4s,v27.4s,v31.4s // += 1 stp q24,q25,[sp,#0] // off-load key block, invariant part add v27.4s,v27.4s,v31.4s // not typo str q26,[sp,#32] add v28.4s,v27.4s,v31.4s add v29.4s,v28.4s,v31.4s add v30.4s,v29.4s,v31.4s shl v31.4s,v31.4s,#2 // 1 -> 4 stp d8,d9,[sp,#128+0] // meet ABI requirements stp d10,d11,[sp,#128+16] stp d12,d13,[sp,#128+32] stp d14,d15,[sp,#128+48] sub x2,x2,#512 // not typo Loop_outer_512_neon: mov v0.16b,v24.16b mov v4.16b,v24.16b mov v8.16b,v24.16b mov v12.16b,v24.16b mov v16.16b,v24.16b mov v20.16b,v24.16b mov v1.16b,v25.16b mov w5,w22 // unpack key block mov v5.16b,v25.16b lsr x6,x22,#32 mov v9.16b,v25.16b mov w7,w23 mov v13.16b,v25.16b lsr x8,x23,#32 mov v17.16b,v25.16b mov w9,w24 mov v21.16b,v25.16b lsr x10,x24,#32 mov v3.16b,v27.16b mov w11,w25 mov v7.16b,v28.16b lsr x12,x25,#32 mov v11.16b,v29.16b mov w13,w26 mov v15.16b,v30.16b lsr x14,x26,#32 mov v2.16b,v26.16b mov w15,w27 mov v6.16b,v26.16b lsr x16,x27,#32 add v19.4s,v3.4s,v31.4s // +4 mov w17,w28 add v23.4s,v7.4s,v31.4s // +4 lsr x19,x28,#32 mov v10.16b,v26.16b mov w20,w30 mov v14.16b,v26.16b lsr x21,x30,#32 mov v18.16b,v26.16b stp q27,q28,[sp,#48] // off-load key block, variable part mov v22.16b,v26.16b str q29,[sp,#80] mov x4,#5 subs x2,x2,#512 Loop_upper_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr 
v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v11.16b,v11.16b,v11.16b,#12 ext v15.16b,v15.16b,v15.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v23.16b,v23.16b,v23.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v9.16b,v9.16b,v9.16b,#4 ext v13.16b,v13.16b,v13.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 ext v21.16b,v21.16b,v21.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor 
w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v11.16b,v11.16b,v11.16b,#4 ext v15.16b,v15.16b,v15.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v23.16b,v23.16b,v23.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v9.16b,v9.16b,v9.16b,#12 ext v13.16b,v13.16b,v13.16b,#12 ext v17.16b,v17.16b,v17.16b,#12 ext v21.16b,v21.16b,v21.16b,#12 cbnz x4,Loop_upper_neon add w5,w5,w22 // accumulate key block add x6,x6,x22,lsr#32 add w7,w7,w23 add x8,x8,x23,lsr#32 add w9,w9,w24 add 
x10,x10,x24,lsr#32 add w11,w11,w25 add x12,x12,x25,lsr#32 add w13,w13,w26 add x14,x14,x26,lsr#32 add w15,w15,w27 add x16,x16,x27,lsr#32 add w17,w17,w28 add x19,x19,x28,lsr#32 add w20,w20,w30 add x21,x21,x30,lsr#32 add x5,x5,x6,lsl#32 // pack add x7,x7,x8,lsl#32 ldp x6,x8,[x1,#0] // load input add x9,x9,x10,lsl#32 add x11,x11,x12,lsl#32 ldp x10,x12,[x1,#16] add x13,x13,x14,lsl#32 add x15,x15,x16,lsl#32 ldp x14,x16,[x1,#32] add x17,x17,x19,lsl#32 add x20,x20,x21,lsl#32 ldp x19,x21,[x1,#48] add x1,x1,#64 #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor x15,x15,x16 eor x17,x17,x19 eor x20,x20,x21 stp x5,x7,[x0,#0] // store output add x28,x28,#1 // increment counter mov w5,w22 // unpack key block lsr x6,x22,#32 stp x9,x11,[x0,#16] mov w7,w23 lsr x8,x23,#32 stp x13,x15,[x0,#32] mov w9,w24 lsr x10,x24,#32 stp x17,x20,[x0,#48] add x0,x0,#64 mov w11,w25 lsr x12,x25,#32 mov w13,w26 lsr x14,x26,#32 mov w15,w27 lsr x16,x27,#32 mov w17,w28 lsr x19,x28,#32 mov w20,w30 lsr x21,x30,#32 mov x4,#5 Loop_lower_neon: sub x4,x4,#1 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli 
v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#12 ext v7.16b,v7.16b,v7.16b,#12 ext v11.16b,v11.16b,v11.16b,#12 ext v15.16b,v15.16b,v15.16b,#12 ext v19.16b,v19.16b,v19.16b,#12 ext v23.16b,v23.16b,v23.16b,#12 ext v1.16b,v1.16b,v1.16b,#4 ext v5.16b,v5.16b,v5.16b,#4 ext v9.16b,v9.16b,v9.16b,#4 ext v13.16b,v13.16b,v13.16b,#4 ext v17.16b,v17.16b,v17.16b,#4 ext v21.16b,v21.16b,v21.16b,#4 add v0.4s,v0.4s,v1.4s add w5,w5,w9 add v4.4s,v4.4s,v5.4s add w6,w6,w10 add v8.4s,v8.4s,v9.4s add w7,w7,w11 add v12.4s,v12.4s,v13.4s add w8,w8,w12 add v16.4s,v16.4s,v17.4s eor w17,w17,w5 add v20.4s,v20.4s,v21.4s eor w19,w19,w6 eor v3.16b,v3.16b,v0.16b eor w20,w20,w7 eor v7.16b,v7.16b,v4.16b eor w21,w21,w8 eor v11.16b,v11.16b,v8.16b ror w17,w17,#16 eor v15.16b,v15.16b,v12.16b ror w19,w19,#16 eor v19.16b,v19.16b,v16.16b ror w20,w20,#16 eor v23.16b,v23.16b,v20.16b ror w21,w21,#16 rev32 v3.8h,v3.8h add w13,w13,w17 rev32 v7.8h,v7.8h add w14,w14,w19 rev32 v11.8h,v11.8h add w15,w15,w20 rev32 v15.8h,v15.8h add w16,w16,w21 rev32 v19.8h,v19.8h eor w9,w9,w13 rev32 v23.8h,v23.8h eor w10,w10,w14 add v2.4s,v2.4s,v3.4s eor w11,w11,w15 add v6.4s,v6.4s,v7.4s eor w12,w12,w16 add v10.4s,v10.4s,v11.4s ror w9,w9,#20 add v14.4s,v14.4s,v15.4s ror w10,w10,#20 add v18.4s,v18.4s,v19.4s ror w11,w11,#20 add v22.4s,v22.4s,v23.4s ror w12,w12,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w9 eor v25.16b,v5.16b,v6.16b add w6,w6,w10 eor v26.16b,v9.16b,v10.16b add w7,w7,w11 eor v27.16b,v13.16b,v14.16b add w8,w8,w12 eor v28.16b,v17.16b,v18.16b eor w17,w17,w5 eor v29.16b,v21.16b,v22.16b eor w19,w19,w6 ushr v1.4s,v24.4s,#20 eor w20,w20,w7 ushr v5.4s,v25.4s,#20 eor w21,w21,w8 ushr v9.4s,v26.4s,#20 ror w17,w17,#24 ushr v13.4s,v27.4s,#20 ror w19,w19,#24 ushr v17.4s,v28.4s,#20 ror w20,w20,#24 ushr v21.4s,v29.4s,#20 ror w21,w21,#24 sli v1.4s,v24.4s,#12 add w13,w13,w17 sli v5.4s,v25.4s,#12 add w14,w14,w19 sli v9.4s,v26.4s,#12 add w15,w15,w20 sli v13.4s,v27.4s,#12 add w16,w16,w21 sli v17.4s,v28.4s,#12 eor w9,w9,w13 sli v21.4s,v29.4s,#12 eor w10,w10,w14 add v0.4s,v0.4s,v1.4s eor w11,w11,w15 add v4.4s,v4.4s,v5.4s eor w12,w12,w16 add v8.4s,v8.4s,v9.4s ror w9,w9,#25 add v12.4s,v12.4s,v13.4s ror w10,w10,#25 add v16.4s,v16.4s,v17.4s 
ror w11,w11,#25 add v20.4s,v20.4s,v21.4s ror w12,w12,#25 eor v24.16b,v3.16b,v0.16b add w5,w5,w10 eor v25.16b,v7.16b,v4.16b add w6,w6,w11 eor v26.16b,v11.16b,v8.16b add w7,w7,w12 eor v27.16b,v15.16b,v12.16b add w8,w8,w9 eor v28.16b,v19.16b,v16.16b eor w21,w21,w5 eor v29.16b,v23.16b,v20.16b eor w17,w17,w6 ushr v3.4s,v24.4s,#24 eor w19,w19,w7 ushr v7.4s,v25.4s,#24 eor w20,w20,w8 ushr v11.4s,v26.4s,#24 ror w21,w21,#16 ushr v15.4s,v27.4s,#24 ror w17,w17,#16 ushr v19.4s,v28.4s,#24 ror w19,w19,#16 ushr v23.4s,v29.4s,#24 ror w20,w20,#16 sli v3.4s,v24.4s,#8 add w15,w15,w21 sli v7.4s,v25.4s,#8 add w16,w16,w17 sli v11.4s,v26.4s,#8 add w13,w13,w19 sli v15.4s,v27.4s,#8 add w14,w14,w20 sli v19.4s,v28.4s,#8 eor w10,w10,w15 sli v23.4s,v29.4s,#8 eor w11,w11,w16 add v2.4s,v2.4s,v3.4s eor w12,w12,w13 add v6.4s,v6.4s,v7.4s eor w9,w9,w14 add v10.4s,v10.4s,v11.4s ror w10,w10,#20 add v14.4s,v14.4s,v15.4s ror w11,w11,#20 add v18.4s,v18.4s,v19.4s ror w12,w12,#20 add v22.4s,v22.4s,v23.4s ror w9,w9,#20 eor v24.16b,v1.16b,v2.16b add w5,w5,w10 eor v25.16b,v5.16b,v6.16b add w6,w6,w11 eor v26.16b,v9.16b,v10.16b add w7,w7,w12 eor v27.16b,v13.16b,v14.16b add w8,w8,w9 eor v28.16b,v17.16b,v18.16b eor w21,w21,w5 eor v29.16b,v21.16b,v22.16b eor w17,w17,w6 ushr v1.4s,v24.4s,#25 eor w19,w19,w7 ushr v5.4s,v25.4s,#25 eor w20,w20,w8 ushr v9.4s,v26.4s,#25 ror w21,w21,#24 ushr v13.4s,v27.4s,#25 ror w17,w17,#24 ushr v17.4s,v28.4s,#25 ror w19,w19,#24 ushr v21.4s,v29.4s,#25 ror w20,w20,#24 sli v1.4s,v24.4s,#7 add w15,w15,w21 sli v5.4s,v25.4s,#7 add w16,w16,w17 sli v9.4s,v26.4s,#7 add w13,w13,w19 sli v13.4s,v27.4s,#7 add w14,w14,w20 sli v17.4s,v28.4s,#7 eor w10,w10,w15 sli v21.4s,v29.4s,#7 eor w11,w11,w16 ext v2.16b,v2.16b,v2.16b,#8 eor w12,w12,w13 ext v6.16b,v6.16b,v6.16b,#8 eor w9,w9,w14 ext v10.16b,v10.16b,v10.16b,#8 ror w10,w10,#25 ext v14.16b,v14.16b,v14.16b,#8 ror w11,w11,#25 ext v18.16b,v18.16b,v18.16b,#8 ror w12,w12,#25 ext v22.16b,v22.16b,v22.16b,#8 ror w9,w9,#25 ext v3.16b,v3.16b,v3.16b,#4 ext v7.16b,v7.16b,v7.16b,#4 ext v11.16b,v11.16b,v11.16b,#4 ext v15.16b,v15.16b,v15.16b,#4 ext v19.16b,v19.16b,v19.16b,#4 ext v23.16b,v23.16b,v23.16b,#4 ext v1.16b,v1.16b,v1.16b,#12 ext v5.16b,v5.16b,v5.16b,#12 ext v9.16b,v9.16b,v9.16b,#12 ext v13.16b,v13.16b,v13.16b,#12 ext v17.16b,v17.16b,v17.16b,#12 ext v21.16b,v21.16b,v21.16b,#12 cbnz x4,Loop_lower_neon add w5,w5,w22 // accumulate key block ldp q24,q25,[sp,#0] add x6,x6,x22,lsr#32 ldp q26,q27,[sp,#32] add w7,w7,w23 ldp q28,q29,[sp,#64] add x8,x8,x23,lsr#32 add v0.4s,v0.4s,v24.4s add w9,w9,w24 add v4.4s,v4.4s,v24.4s add x10,x10,x24,lsr#32 add v8.4s,v8.4s,v24.4s add w11,w11,w25 add v12.4s,v12.4s,v24.4s add x12,x12,x25,lsr#32 add v16.4s,v16.4s,v24.4s add w13,w13,w26 add v20.4s,v20.4s,v24.4s add x14,x14,x26,lsr#32 add v2.4s,v2.4s,v26.4s add w15,w15,w27 add v6.4s,v6.4s,v26.4s add x16,x16,x27,lsr#32 add v10.4s,v10.4s,v26.4s add w17,w17,w28 add v14.4s,v14.4s,v26.4s add x19,x19,x28,lsr#32 add v18.4s,v18.4s,v26.4s add w20,w20,w30 add v22.4s,v22.4s,v26.4s add x21,x21,x30,lsr#32 add v19.4s,v19.4s,v31.4s // +4 add x5,x5,x6,lsl#32 // pack add v23.4s,v23.4s,v31.4s // +4 add x7,x7,x8,lsl#32 add v3.4s,v3.4s,v27.4s ldp x6,x8,[x1,#0] // load input add v7.4s,v7.4s,v28.4s add x9,x9,x10,lsl#32 add v11.4s,v11.4s,v29.4s add x11,x11,x12,lsl#32 add v15.4s,v15.4s,v30.4s ldp x10,x12,[x1,#16] add v19.4s,v19.4s,v27.4s add x13,x13,x14,lsl#32 add v23.4s,v23.4s,v28.4s add x15,x15,x16,lsl#32 add v1.4s,v1.4s,v25.4s ldp x14,x16,[x1,#32] add v5.4s,v5.4s,v25.4s add x17,x17,x19,lsl#32 add v9.4s,v9.4s,v25.4s add 
x20,x20,x21,lsl#32 add v13.4s,v13.4s,v25.4s ldp x19,x21,[x1,#48] add v17.4s,v17.4s,v25.4s add x1,x1,#64 add v21.4s,v21.4s,v25.4s #ifdef __AARCH64EB__ rev x5,x5 rev x7,x7 rev x9,x9 rev x11,x11 rev x13,x13 rev x15,x15 rev x17,x17 rev x20,x20 #endif ld1 {v24.16b,v25.16b,v26.16b,v27.16b},[x1],#64 eor x5,x5,x6 eor x7,x7,x8 eor x9,x9,x10 eor x11,x11,x12 eor x13,x13,x14 eor v0.16b,v0.16b,v24.16b eor x15,x15,x16 eor v1.16b,v1.16b,v25.16b eor x17,x17,x19 eor v2.16b,v2.16b,v26.16b eor x20,x20,x21 eor v3.16b,v3.16b,v27.16b ld1 {v24.16b,v25.16b,v26.16b,v27.16b},[x1],#64 stp x5,x7,[x0,#0] // store output add x28,x28,#7 // increment counter stp x9,x11,[x0,#16] stp x13,x15,[x0,#32] stp x17,x20,[x0,#48] add x0,x0,#64 st1 {v0.16b,v1.16b,v2.16b,v3.16b},[x0],#64 ld1 {v0.16b,v1.16b,v2.16b,v3.16b},[x1],#64 eor v4.16b,v4.16b,v24.16b eor v5.16b,v5.16b,v25.16b eor v6.16b,v6.16b,v26.16b eor v7.16b,v7.16b,v27.16b st1 {v4.16b,v5.16b,v6.16b,v7.16b},[x0],#64 ld1 {v4.16b,v5.16b,v6.16b,v7.16b},[x1],#64 eor v8.16b,v8.16b,v0.16b ldp q24,q25,[sp,#0] eor v9.16b,v9.16b,v1.16b ldp q26,q27,[sp,#32] eor v10.16b,v10.16b,v2.16b eor v11.16b,v11.16b,v3.16b st1 {v8.16b,v9.16b,v10.16b,v11.16b},[x0],#64 ld1 {v8.16b,v9.16b,v10.16b,v11.16b},[x1],#64 eor v12.16b,v12.16b,v4.16b eor v13.16b,v13.16b,v5.16b eor v14.16b,v14.16b,v6.16b eor v15.16b,v15.16b,v7.16b st1 {v12.16b,v13.16b,v14.16b,v15.16b},[x0],#64 ld1 {v12.16b,v13.16b,v14.16b,v15.16b},[x1],#64 eor v16.16b,v16.16b,v8.16b eor v17.16b,v17.16b,v9.16b eor v18.16b,v18.16b,v10.16b eor v19.16b,v19.16b,v11.16b st1 {v16.16b,v17.16b,v18.16b,v19.16b},[x0],#64 shl v0.4s,v31.4s,#1 // 4 -> 8 eor v20.16b,v20.16b,v12.16b eor v21.16b,v21.16b,v13.16b eor v22.16b,v22.16b,v14.16b eor v23.16b,v23.16b,v15.16b st1 {v20.16b,v21.16b,v22.16b,v23.16b},[x0],#64 add v27.4s,v27.4s,v0.4s // += 8 add v28.4s,v28.4s,v0.4s add v29.4s,v29.4s,v0.4s add v30.4s,v30.4s,v0.4s b.hs Loop_outer_512_neon adds x2,x2,#512 ushr v0.4s,v31.4s,#2 // 4 -> 1 ldp d8,d9,[sp,#128+0] // meet ABI requirements ldp d10,d11,[sp,#128+16] ldp d12,d13,[sp,#128+32] ldp d14,d15,[sp,#128+48] stp q24,q31,[sp,#0] // wipe off-load area stp q24,q31,[sp,#32] stp q24,q31,[sp,#64] b.eq Ldone_512_neon cmp x2,#192 sub v27.4s,v27.4s,v0.4s // -= 1 sub v28.4s,v28.4s,v0.4s sub v29.4s,v29.4s,v0.4s add sp,sp,#128 b.hs Loop_outer_neon eor v25.16b,v25.16b,v25.16b eor v26.16b,v26.16b,v26.16b eor v27.16b,v27.16b,v27.16b eor v28.16b,v28.16b,v28.16b eor v29.16b,v29.16b,v29.16b eor v30.16b,v30.16b,v30.16b b Loop_outer Ldone_512_neon: ldp x19,x20,[x29,#16] add sp,sp,#128+64 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#96 AARCH64_VALIDATE_LINK_REGISTER ret #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
74,000
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/ios-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_AARCH64) && defined(__APPLE__) #include <openssl/arm_arch.h> .section __TEXT,__const .align 7 Lchacha20_consts: .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' Linc: .long 1,2,3,4 Lrol8: .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 Lclamp: .quad 0x0FFFFFFC0FFFFFFF, 0x0FFFFFFC0FFFFFFC .text .align 6 Lpoly_hash_ad_internal: .cfi_startproc cbnz x4, Lpoly_hash_intro ret Lpoly_hash_intro: cmp x4, #16 b.lt Lpoly_hash_ad_tail ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #16 b Lpoly_hash_ad_internal Lpoly_hash_ad_tail: cbz x4, Lpoly_hash_ad_ret eor v20.16b, v20.16b, v20.16b // Use T0 to load the AAD sub x4, x4, #1 Lpoly_hash_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, x4] mov v20.b[0], w11 subs x4, x4, #1 b.ge Lpoly_hash_tail_16_compose mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most Lpoly_hash_ad_ret: ret .cfi_endproc ///////////////////////////////// // // void chacha20_poly1305_seal(uint8_t *pt, uint8_t *ct, size_t len_in, uint8_t *ad, size_t len_ad, union open_data *seal_data); // .globl _chacha20_poly1305_seal .private_extern _chacha20_poly1305_seal .align 6 _chacha20_poly1305_seal: AARCH64_SIGN_LINK_REGISTER .cfi_startproc stp x29, x30, [sp, #-80]! .cfi_def_cfa_offset 80 .cfi_offset w30, -72 .cfi_offset w29, -80 mov x29, sp // We probably could do .cfi_def_cfa w29, 80 at this point, but since // we don't actually use the frame pointer like that, it's probably not // worth bothering. 
stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] .cfi_offset b15, -8 .cfi_offset b14, -16 .cfi_offset b13, -24 .cfi_offset b12, -32 .cfi_offset b11, -40 .cfi_offset b10, -48 .cfi_offset b9, -56 .cfi_offset b8, -64 adrp x11, Lchacha20_consts@PAGE add x11, x11, Lchacha20_consts@PAGEOFF ld1 {v24.16b - v27.16b}, [x11] // Load the CONSTS, INC, ROL8 and CLAMP values ld1 {v28.16b - v30.16b}, [x5] mov x15, #1 // Prepare the Poly1305 state mov x8, #0 mov x9, #0 mov x10, #0 ldr x12, [x5, #56] // The total cipher text length includes extra_in_len add x12, x12, x2 mov v31.d[0], x4 // Store the input and aad lengths mov v31.d[1], x12 cmp x2, #128 b.le Lseal_128 // Optimization for smaller buffers // Initially we prepare 5 ChaCha20 blocks. Four to encrypt up to 4 blocks (256 bytes) of plaintext, // and one for the Poly1305 R and S keys. The first four blocks (A0-A3..D0-D3) are computed vertically, // the fifth block (A4-D4) horizontally. ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b sub x5, x5, #32 mov x6, #10 .align 5 Lseal_init_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, 
v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x6, x6, #1 b.hi Lseal_init_rounds add v15.4s, v15.4s, v25.4s mov x11, #4 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s and v4.16b, v4.16b, v27.16b add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s mov x16, v4.d[0] // Move the R key to GPRs mov x17, v4.d[1] mov v27.16b, v9.16b // Store the S key bl Lpoly_hash_ad_internal mov x3, x0 cmp x2, #256 b.le Lseal_tail ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 
{v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #256 mov x6, #4 // In the first run of the loop we need to hash 256 bytes, therefore we hash one block for the first 4 rounds mov x7, #6 // and two blocks for the remaining 6, for a total of (1 * 4 + 2 * 6) * 16 = 256 Lseal_main_loop: adrp x11, Lchacha20_consts@PAGE add x11, x11, Lchacha20_consts@PAGEOFF ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s sub x5, x5, #32 .align 5 Lseal_main_loop_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 
adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x6, x6, #1 b.ge Lseal_main_loop_rounds ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most subs x7, x7, #1 b.gt Lseal_main_loop_rounds eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s add v15.4s, v15.4s, v25.4s mov x11, #5 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 
v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s add v14.4s, v14.4s, v29.4s add v19.4s, v19.4s, v30.4s cmp x2, #320 b.le Lseal_tail ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v4.16b eor v21.16b, v21.16b, v9.16b eor v22.16b, v22.16b, v14.16b eor v23.16b, v23.16b, v19.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #320 mov x6, #0 mov x7, #10 // For the remainder of the loop we always hash and encrypt 320 bytes per iteration b Lseal_main_loop Lseal_tail: // This part of the function handles the storage and authentication of the last [0,320) bytes // We assume A0-A4 ... D0-D4 hold at least inl (320 max) bytes of the stream data. 
cmp x2, #64 b.lt Lseal_tail_64 // Store and authenticate 64B blocks per iteration ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v22.d[0] mov x12, v22.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v23.d[0] mov x12, v23.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 // Shift the state left by 64 bytes for the next iteration of the loop mov v0.16b, v1.16b 
mov v5.16b, v6.16b mov v10.16b, v11.16b mov v15.16b, v16.16b mov v1.16b, v2.16b mov v6.16b, v7.16b mov v11.16b, v12.16b mov v16.16b, v17.16b mov v2.16b, v3.16b mov v7.16b, v8.16b mov v12.16b, v13.16b mov v17.16b, v18.16b mov v3.16b, v4.16b mov v8.16b, v9.16b mov v13.16b, v14.16b mov v18.16b, v19.16b b Lseal_tail Lseal_tail_64: ldp x3, x4, [x5, #48] // extra_in_len and extra_in_ptr // Here we handle the last [0,64) bytes of plaintext cmp x2, #16 b.lt Lseal_tail_16 // Each iteration encrypt and authenticate a 16B block ld1 {v20.16b}, [x1], #16 eor v20.16b, v20.16b, v0.16b mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most st1 {v20.16b}, [x0], #16 sub x2, x2, #16 // Shift the state left by 16 bytes for the next iteration of the loop mov v0.16b, v5.16b mov v5.16b, v10.16b mov v10.16b, v15.16b b Lseal_tail_64 Lseal_tail_16: // Here we handle the last [0,16) bytes of ciphertext that require a padded block cbz x2, Lseal_hash_extra eor v20.16b, v20.16b, v20.16b // Use T0 to load the plaintext/extra in eor v21.16b, v21.16b, v21.16b // Use T1 to generate an AND mask that will only mask the ciphertext bytes not v22.16b, v20.16b mov x6, x2 add x1, x1, x2 cbz x4, Lseal_tail_16_compose // No extra data to pad with, zero padding mov x7, #16 // We need to load some extra_in first for padding sub x7, x7, x2 cmp x4, x7 csel x7, x4, x7, lt // Load the minimum of extra_in_len and the amount needed to fill the register mov x12, x7 add x3, x3, x7 sub x4, x4, x7 Lseal_tail16_compose_extra_in: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, #-1]! mov v20.b[0], w11 subs x7, x7, #1 b.gt Lseal_tail16_compose_extra_in add x3, x3, x12 Lseal_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x1, #-1]! 
mov v20.b[0], w11 ext v21.16b, v22.16b, v21.16b, #15 subs x2, x2, #1 b.gt Lseal_tail_16_compose and v0.16b, v0.16b, v21.16b eor v20.16b, v20.16b, v0.16b mov v21.16b, v20.16b Lseal_tail_16_store: umov w11, v20.b[0] strb w11, [x0], #1 ext v20.16b, v20.16b, v20.16b, #1 subs x6, x6, #1 b.gt Lseal_tail_16_store // Hash in the final ct block concatenated with extra_in mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most Lseal_hash_extra: cbz x4, Lseal_finalize Lseal_hash_extra_loop: cmp x4, #16 b.lt Lseal_hash_extra_tail ld1 {v20.16b}, [x3], #16 mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #16 b Lseal_hash_extra_loop Lseal_hash_extra_tail: cbz x4, Lseal_finalize eor v20.16b, v20.16b, v20.16b // Use T0 to load the remaining extra ciphertext add x3, x3, x4 Lseal_hash_extra_load: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x3, #-1]! 
mov v20.b[0], w11 subs x4, x4, #1 b.gt Lseal_hash_extra_load // Hash in the final padded extra_in blcok mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most Lseal_finalize: mov x11, v31.d[0] mov x12, v31.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most // Final reduction step sub x12, xzr, x15 orr x13, xzr, #3 subs x11, x8, #-5 sbcs x12, x9, x12 sbcs x13, x10, x13 csel x8, x11, x8, cs csel x9, x12, x9, cs csel x10, x13, x10, cs mov x11, v27.d[0] mov x12, v27.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 stp x8, x9, [x5] ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] .cfi_restore b15 .cfi_restore b14 .cfi_restore b13 .cfi_restore b12 .cfi_restore b11 .cfi_restore b10 .cfi_restore b9 .cfi_restore b8 ldp x29, x30, [sp], 80 .cfi_restore w29 .cfi_restore w30 .cfi_def_cfa_offset 0 AARCH64_VALIDATE_LINK_REGISTER ret Lseal_128: // On some architectures preparing 5 blocks for small buffers is wasteful eor v25.16b, v25.16b, v25.16b mov x11, #1 mov v25.s[0], w11 mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v17.16b, v30.16b add v15.4s, v17.4s, v25.4s add v16.4s, v15.4s, v25.4s mov x6, #10 Lseal_128_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add 
v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x6, x6, #1 b.hi Lseal_128_rounds add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s // Only the first 32 bytes of the third block (counter = 0) are needed, // so skip updating v12 and v17. add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v30.4s, v30.4s, v25.4s add v15.4s, v15.4s, v30.4s add v30.4s, v30.4s, v25.4s add v16.4s, v16.4s, v30.4s and v2.16b, v2.16b, v27.16b mov x16, v2.d[0] // Move the R key to GPRs mov x17, v2.d[1] mov v27.16b, v7.16b // Store the S key bl Lpoly_hash_ad_internal b Lseal_tail .cfi_endproc ///////////////////////////////// // // void chacha20_poly1305_open(uint8_t *pt, uint8_t *ct, size_t len_in, uint8_t *ad, size_t len_ad, union open_data *aead_data); // .globl _chacha20_poly1305_open .private_extern _chacha20_poly1305_open .align 6 _chacha20_poly1305_open: AARCH64_SIGN_LINK_REGISTER .cfi_startproc stp x29, x30, [sp, #-80]! .cfi_def_cfa_offset 80 .cfi_offset w30, -72 .cfi_offset w29, -80 mov x29, sp // We probably could do .cfi_def_cfa w29, 80 at this point, but since // we don't actually use the frame pointer like that, it's probably not // worth bothering. 
stp d8, d9, [sp, #16] stp d10, d11, [sp, #32] stp d12, d13, [sp, #48] stp d14, d15, [sp, #64] .cfi_offset b15, -8 .cfi_offset b14, -16 .cfi_offset b13, -24 .cfi_offset b12, -32 .cfi_offset b11, -40 .cfi_offset b10, -48 .cfi_offset b9, -56 .cfi_offset b8, -64 adrp x11, Lchacha20_consts@PAGE add x11, x11, Lchacha20_consts@PAGEOFF ld1 {v24.16b - v27.16b}, [x11] // Load the CONSTS, INC, ROL8 and CLAMP values ld1 {v28.16b - v30.16b}, [x5] mov x15, #1 // Prepare the Poly1305 state mov x8, #0 mov x9, #0 mov x10, #0 mov v31.d[0], x4 // Store the input and aad lengths mov v31.d[1], x2 cmp x2, #128 b.le Lopen_128 // Optimization for smaller buffers // Initially we prepare a single ChaCha20 block for the Poly1305 R and S keys mov v0.16b, v24.16b mov v5.16b, v28.16b mov v10.16b, v29.16b mov v15.16b, v30.16b mov x6, #10 .align 5 Lopen_init_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 subs x6, x6, #1 b.hi Lopen_init_rounds add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s and v0.16b, v0.16b, v27.16b mov x16, v0.d[0] // Move the R key to GPRs mov x17, v0.d[1] mov v27.16b, v5.16b // Store the S key bl Lpoly_hash_ad_internal Lopen_ad_done: mov x3, x1 // Each iteration of the loop hash 320 bytes, and prepare stream for 320 bytes Lopen_main_loop: cmp x2, #192 b.lt Lopen_tail adrp x11, Lchacha20_consts@PAGE add x11, x11, Lchacha20_consts@PAGEOFF ld4r {v0.4s,v1.4s,v2.4s,v3.4s}, [x11] mov v4.16b, v24.16b ld4r {v5.4s,v6.4s,v7.4s,v8.4s}, [x5], #16 mov v9.16b, v28.16b ld4r {v10.4s,v11.4s,v12.4s,v13.4s}, [x5], #16 mov v14.16b, v29.16b ld4r {v15.4s,v16.4s,v17.4s,v18.4s}, [x5] sub x5, x5, #32 add v15.4s, v15.4s, v25.4s mov v19.16b, v30.16b eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s lsr x4, x2, #4 // How many whole blocks we have to hash, will always be at least 12 sub x4, x4, #10 mov x7, #10 subs x6, x7, x4 subs x6, x7, x4 // itr1 can be negative if we have more than 320 bytes to hash csel x7, x7, x4, le // if itr1 is zero or less, itr2 should be 10 to indicate all 10 rounds are full cbz x7, Lopen_main_loop_rounds_short .align 5 Lopen_main_loop_rounds: ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 
adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most Lopen_main_loop_rounds_short: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v9.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v18.8h, v18.8h rev32 v19.8h, v19.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b eor v8.16b, v8.16b, v13.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v9.4s, #20 sli v8.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s add v3.4s, v3.4s, v7.4s add v4.4s, v4.4s, v8.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b eor v18.16b, v18.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v18.16b, {v18.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s add v13.4s, v13.4s, v18.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v14.16b ushr v9.4s, v8.4s, #25 sli v9.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #4 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #12 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most add v0.4s, v0.4s, v6.4s add v1.4s, v1.4s, v7.4s add v2.4s, v2.4s, v8.4s add v3.4s, v3.4s, v5.4s add v4.4s, v4.4s, v9.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b rev32 v18.8h, v18.8h rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h rev32 v19.8h, v19.8h add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor 
v6.16b, v6.16b, v12.16b eor v7.16b, v7.16b, v13.16b eor v8.16b, v8.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v9.16b, v9.16b, v14.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 ushr v7.4s, v8.4s, #20 sli v7.4s, v8.4s, #12 ushr v8.4s, v5.4s, #20 sli v8.4s, v5.4s, #12 ushr v5.4s, v9.4s, #20 sli v5.4s, v9.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s add v3.4s, v3.4s, v8.4s add v4.4s, v4.4s, v5.4s eor v18.16b, v18.16b, v0.16b eor v15.16b, v15.16b, v1.16b eor v16.16b, v16.16b, v2.16b eor v17.16b, v17.16b, v3.16b eor v19.16b, v19.16b, v4.16b tbl v18.16b, {v18.16b}, v26.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b tbl v19.16b, {v19.16b}, v26.16b add v12.4s, v12.4s, v18.4s add v13.4s, v13.4s, v15.4s add v10.4s, v10.4s, v16.4s add v11.4s, v11.4s, v17.4s add v14.4s, v14.4s, v19.4s eor v20.16b, v20.16b, v12.16b eor v6.16b, v6.16b, v13.16b eor v7.16b, v7.16b, v10.16b eor v8.16b, v8.16b, v11.16b eor v5.16b, v5.16b, v14.16b ushr v9.4s, v5.4s, #25 sli v9.4s, v5.4s, #7 ushr v5.4s, v8.4s, #25 sli v5.4s, v8.4s, #7 ushr v8.4s, v7.4s, #25 sli v8.4s, v7.4s, #7 ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v9.16b, v9.16b, v9.16b, #12 ext v14.16b, v14.16b, v14.16b, #8 ext v19.16b, v19.16b, v19.16b, #4 subs x7, x7, #1 b.gt Lopen_main_loop_rounds subs x6, x6, #1 b.ge Lopen_main_loop_rounds_short eor v20.16b, v20.16b, v20.16b //zero not v21.16b, v20.16b // -1 sub v21.4s, v25.4s, v21.4s // Add +1 ext v20.16b, v21.16b, v20.16b, #12 // Get the last element (counter) add v19.4s, v19.4s, v20.4s add v15.4s, v15.4s, v25.4s mov x11, #5 dup v20.4s, w11 add v25.4s, v25.4s, v20.4s zip1 v20.4s, v0.4s, v1.4s zip2 v21.4s, v0.4s, v1.4s zip1 v22.4s, v2.4s, v3.4s zip2 v23.4s, v2.4s, v3.4s zip1 v0.2d, v20.2d, v22.2d zip2 v1.2d, v20.2d, v22.2d zip1 v2.2d, v21.2d, v23.2d zip2 v3.2d, v21.2d, v23.2d zip1 v20.4s, v5.4s, v6.4s zip2 v21.4s, v5.4s, v6.4s zip1 v22.4s, v7.4s, v8.4s zip2 v23.4s, v7.4s, v8.4s zip1 v5.2d, v20.2d, v22.2d zip2 v6.2d, v20.2d, v22.2d zip1 v7.2d, v21.2d, v23.2d zip2 v8.2d, v21.2d, v23.2d zip1 v20.4s, v10.4s, v11.4s zip2 v21.4s, v10.4s, v11.4s zip1 v22.4s, v12.4s, v13.4s zip2 v23.4s, v12.4s, v13.4s zip1 v10.2d, v20.2d, v22.2d zip2 v11.2d, v20.2d, v22.2d zip1 v12.2d, v21.2d, v23.2d zip2 v13.2d, v21.2d, v23.2d zip1 v20.4s, v15.4s, v16.4s zip2 v21.4s, v15.4s, v16.4s zip1 v22.4s, v17.4s, v18.4s zip2 v23.4s, v17.4s, v18.4s zip1 v15.2d, v20.2d, v22.2d zip2 v16.2d, v20.2d, v22.2d zip1 v17.2d, v21.2d, v23.2d zip2 v18.2d, v21.2d, v23.2d add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v1.4s, v1.4s, v24.4s add v6.4s, v6.4s, v28.4s add v11.4s, v11.4s, v29.4s add v16.4s, v16.4s, v30.4s add v2.4s, v2.4s, v24.4s add v7.4s, v7.4s, v28.4s add v12.4s, v12.4s, v29.4s add v17.4s, v17.4s, v30.4s add v3.4s, v3.4s, v24.4s add v8.4s, v8.4s, v28.4s add v13.4s, v13.4s, v29.4s add v18.4s, v18.4s, v30.4s add v4.4s, v4.4s, v24.4s add v9.4s, v9.4s, v28.4s add v14.4s, v14.4s, v29.4s add v19.4s, v19.4s, v30.4s // We can always safely store 192 bytes ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - 
v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #192 mov v0.16b, v3.16b mov v5.16b, v8.16b mov v10.16b, v13.16b mov v15.16b, v18.16b cmp x2, #64 b.lt Lopen_tail_64_store ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v3.16b eor v21.16b, v21.16b, v8.16b eor v22.16b, v22.16b, v13.16b eor v23.16b, v23.16b, v18.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 mov v0.16b, v4.16b mov v5.16b, v9.16b mov v10.16b, v14.16b mov v15.16b, v19.16b cmp x2, #64 b.lt Lopen_tail_64_store ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v4.16b eor v21.16b, v21.16b, v9.16b eor v22.16b, v22.16b, v14.16b eor v23.16b, v23.16b, v19.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 b Lopen_main_loop Lopen_tail: cbz x2, Lopen_finalize lsr x4, x2, #4 // How many whole blocks we have to hash cmp x2, #64 b.le Lopen_tail_64 cmp x2, #128 b.le Lopen_tail_128 Lopen_tail_192: // We need three more blocks mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v15.16b, v30.16b mov v16.16b, v30.16b mov v17.16b, v30.16b eor v23.16b, v23.16b, v23.16b eor v21.16b, v21.16b, v21.16b ins v23.s[0], v25.s[0] ins v21.d[0], x15 add v22.4s, v23.4s, v21.4s add v21.4s, v22.4s, v21.4s add v15.4s, v15.4s, v21.4s add v16.4s, v16.4s, v23.4s add v17.4s, v17.4s, v22.4s mov x7, #10 subs x6, x7, x4 // itr1 can be negative if we have more than 160 bytes to hash csel x7, x7, x4, le // if itr1 is zero or less, itr2 should be 10 to indicate all 10 rounds are hashing sub x4, x4, x7 cbz x7, Lopen_tail_192_rounds_no_hash Lopen_tail_192_rounds: ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most Lopen_tail_192_rounds_no_hash: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, 
v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x7, x7, #1 b.gt Lopen_tail_192_rounds subs x6, x6, #1 b.ge Lopen_tail_192_rounds_no_hash // We hashed 160 bytes at most, may still have 32 bytes left Lopen_tail_192_hash: cbz x4, Lopen_tail_192_hash_done ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #1 b Lopen_tail_192_hash Lopen_tail_192_hash_done: add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v12.4s, v12.4s, v29.4s add v15.4s, v15.4s, v30.4s add v16.4s, v16.4s, v30.4s add v17.4s, v17.4s, v30.4s add v15.4s, v15.4s, v21.4s add v16.4s, v16.4s, v23.4s add v17.4s, v17.4s, v22.4s ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, 
v2.16b eor v21.16b, v21.16b, v7.16b eor v22.16b, v22.16b, v12.16b eor v23.16b, v23.16b, v17.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #128 b Lopen_tail_64_store Lopen_tail_128: // We need two more blocks mov v0.16b, v24.16b mov v1.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v15.16b, v30.16b mov v16.16b, v30.16b eor v23.16b, v23.16b, v23.16b eor v22.16b, v22.16b, v22.16b ins v23.s[0], v25.s[0] ins v22.d[0], x15 add v22.4s, v22.4s, v23.4s add v15.4s, v15.4s, v22.4s add v16.4s, v16.4s, v23.4s mov x6, #10 sub x6, x6, x4 Lopen_tail_128_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v1.4s, v1.4s, v6.4s eor v16.16b, v16.16b, v1.16b rev32 v16.8h, v16.8h add v11.4s, v11.4s, v16.4s eor v6.16b, v6.16b, v11.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 add v1.4s, v1.4s, v20.4s eor v16.16b, v16.16b, v1.16b tbl v16.16b, {v16.16b}, v26.16b add v11.4s, v11.4s, v16.4s eor v20.16b, v20.16b, v11.16b ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v6.16b, v6.16b, v6.16b, #4 ext v11.16b, v11.16b, v11.16b, #8 ext v16.16b, v16.16b, v16.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 add v1.4s, v1.4s, v6.4s eor v16.16b, v16.16b, v1.16b rev32 v16.8h, v16.8h add v11.4s, v11.4s, v16.4s eor v6.16b, v6.16b, v11.16b ushr v20.4s, v6.4s, #20 sli v20.4s, v6.4s, #12 add v1.4s, v1.4s, v20.4s eor v16.16b, v16.16b, v1.16b tbl v16.16b, {v16.16b}, v26.16b add v11.4s, v11.4s, v16.4s eor v20.16b, v20.16b, v11.16b ushr v6.4s, v20.4s, #25 sli v6.4s, v20.4s, #7 ext v6.16b, v6.16b, v6.16b, #12 ext v11.16b, v11.16b, v11.16b, #8 ext v16.16b, v16.16b, v16.16b, #4 subs x6, x6, #1 b.gt Lopen_tail_128_rounds cbz x4, Lopen_tail_128_rounds_done subs x4, x4, #1 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most b Lopen_tail_128_rounds Lopen_tail_128_rounds_done: add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add 
v15.4s, v15.4s, v30.4s add v16.4s, v16.4s, v30.4s add v15.4s, v15.4s, v22.4s add v16.4s, v16.4s, v23.4s ld1 {v20.16b - v23.16b}, [x1], #64 eor v20.16b, v20.16b, v1.16b eor v21.16b, v21.16b, v6.16b eor v22.16b, v22.16b, v11.16b eor v23.16b, v23.16b, v16.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 b Lopen_tail_64_store Lopen_tail_64: // We just need a single block mov v0.16b, v24.16b mov v5.16b, v28.16b mov v10.16b, v29.16b mov v15.16b, v30.16b eor v23.16b, v23.16b, v23.16b ins v23.s[0], v25.s[0] add v15.4s, v15.4s, v23.4s mov x6, #10 sub x6, x6, x4 Lopen_tail_64_rounds: add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 add v0.4s, v0.4s, v5.4s eor v15.16b, v15.16b, v0.16b rev32 v15.8h, v15.8h add v10.4s, v10.4s, v15.4s eor v5.16b, v5.16b, v10.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 add v0.4s, v0.4s, v20.4s eor v15.16b, v15.16b, v0.16b tbl v15.16b, {v15.16b}, v26.16b add v10.4s, v10.4s, v15.4s eor v20.16b, v20.16b, v10.16b ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 subs x6, x6, #1 b.gt Lopen_tail_64_rounds cbz x4, Lopen_tail_64_rounds_done subs x4, x4, #1 ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most b Lopen_tail_64_rounds Lopen_tail_64_rounds_done: add v0.4s, v0.4s, v24.4s add v5.4s, v5.4s, v28.4s add v10.4s, v10.4s, v29.4s add v15.4s, v15.4s, v30.4s add v15.4s, v15.4s, v23.4s Lopen_tail_64_store: cmp x2, #16 b.lt Lopen_tail_16 ld1 {v20.16b}, [x1], #16 eor v20.16b, v20.16b, v0.16b st1 {v20.16b}, [x0], #16 mov v0.16b, v5.16b mov v5.16b, v10.16b mov v10.16b, v15.16b sub x2, x2, #16 b Lopen_tail_64_store Lopen_tail_16: // Here we handle the last [0,16) bytes that require a padded block cbz x2, Lopen_finalize eor v20.16b, v20.16b, v20.16b // Use T0 to load the ciphertext eor v21.16b, v21.16b, v21.16b // Use T1 to generate an AND mask not v22.16b, v20.16b add x7, x1, x2 mov x6, x2 Lopen_tail_16_compose: ext v20.16b, v20.16b, v20.16b, #15 ldrb w11, [x7, #-1]! 
mov v20.b[0], w11 ext v21.16b, v22.16b, v21.16b, #15 subs x2, x2, #1 b.gt Lopen_tail_16_compose and v20.16b, v20.16b, v21.16b // Hash in the final padded block mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most eor v20.16b, v20.16b, v0.16b Lopen_tail_16_store: umov w11, v20.b[0] strb w11, [x0], #1 ext v20.16b, v20.16b, v20.16b, #1 subs x6, x6, #1 b.gt Lopen_tail_16_store Lopen_finalize: mov x11, v31.d[0] mov x12, v31.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most // Final reduction step sub x12, xzr, x15 orr x13, xzr, #3 subs x11, x8, #-5 sbcs x12, x9, x12 sbcs x13, x10, x13 csel x8, x11, x8, cs csel x9, x12, x9, cs csel x10, x13, x10, cs mov x11, v27.d[0] mov x12, v27.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 stp x8, x9, [x5] ldp d8, d9, [sp, #16] ldp d10, d11, [sp, #32] ldp d12, d13, [sp, #48] ldp d14, d15, [sp, #64] .cfi_restore b15 .cfi_restore b14 .cfi_restore b13 .cfi_restore b12 .cfi_restore b11 .cfi_restore b10 .cfi_restore b9 .cfi_restore b8 ldp x29, x30, [sp], 80 .cfi_restore w29 .cfi_restore w30 .cfi_def_cfa_offset 0 AARCH64_VALIDATE_LINK_REGISTER ret Lopen_128: // On some architectures preparing 5 blocks for small buffers is wasteful eor v25.16b, v25.16b, v25.16b mov x11, #1 mov v25.s[0], w11 mov v0.16b, v24.16b mov v1.16b, v24.16b mov v2.16b, v24.16b mov v5.16b, v28.16b mov v6.16b, v28.16b mov v7.16b, v28.16b mov v10.16b, v29.16b mov v11.16b, v29.16b mov v12.16b, v29.16b mov v17.16b, v30.16b add v15.4s, v17.4s, v25.4s add v16.4s, v15.4s, v25.4s mov x6, #10 Lopen_128_rounds: add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, 
v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #4 ext v6.16b, v6.16b, v6.16b, #4 ext v7.16b, v7.16b, v7.16b, #4 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #12 ext v16.16b, v16.16b, v16.16b, #12 ext v17.16b, v17.16b, v17.16b, #12 add v0.4s, v0.4s, v5.4s add v1.4s, v1.4s, v6.4s add v2.4s, v2.4s, v7.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b rev32 v15.8h, v15.8h rev32 v16.8h, v16.8h rev32 v17.8h, v17.8h add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v5.16b, v5.16b, v10.16b eor v6.16b, v6.16b, v11.16b eor v7.16b, v7.16b, v12.16b ushr v20.4s, v5.4s, #20 sli v20.4s, v5.4s, #12 ushr v5.4s, v6.4s, #20 sli v5.4s, v6.4s, #12 ushr v6.4s, v7.4s, #20 sli v6.4s, v7.4s, #12 add v0.4s, v0.4s, v20.4s add v1.4s, v1.4s, v5.4s add v2.4s, v2.4s, v6.4s eor v15.16b, v15.16b, v0.16b eor v16.16b, v16.16b, v1.16b eor v17.16b, v17.16b, v2.16b tbl v15.16b, {v15.16b}, v26.16b tbl v16.16b, {v16.16b}, v26.16b tbl v17.16b, {v17.16b}, v26.16b add v10.4s, v10.4s, v15.4s add v11.4s, v11.4s, v16.4s add v12.4s, v12.4s, v17.4s eor v20.16b, v20.16b, v10.16b eor v5.16b, v5.16b, v11.16b eor v6.16b, v6.16b, v12.16b ushr v7.4s, v6.4s, #25 sli v7.4s, v6.4s, #7 ushr v6.4s, v5.4s, #25 sli v6.4s, v5.4s, #7 ushr v5.4s, v20.4s, #25 sli v5.4s, v20.4s, #7 ext v5.16b, v5.16b, v5.16b, #12 ext v6.16b, v6.16b, v6.16b, #12 ext v7.16b, v7.16b, v7.16b, #12 ext v10.16b, v10.16b, v10.16b, #8 ext v11.16b, v11.16b, v11.16b, #8 ext v12.16b, v12.16b, v12.16b, #8 ext v15.16b, v15.16b, v15.16b, #4 ext v16.16b, v16.16b, v16.16b, #4 ext v17.16b, v17.16b, v17.16b, #4 subs x6, x6, #1 b.hi Lopen_128_rounds add v0.4s, v0.4s, v24.4s add v1.4s, v1.4s, v24.4s add v2.4s, v2.4s, v24.4s add v5.4s, v5.4s, v28.4s add v6.4s, v6.4s, v28.4s add v7.4s, v7.4s, v28.4s add v10.4s, v10.4s, v29.4s add v11.4s, v11.4s, v29.4s add v30.4s, v30.4s, v25.4s add v15.4s, v15.4s, v30.4s add v30.4s, v30.4s, v25.4s add v16.4s, v16.4s, v30.4s and v2.16b, v2.16b, v27.16b mov x16, v2.d[0] // Move the R key to GPRs mov x17, v2.d[1] mov v27.16b, v7.16b // Store the S key bl Lpoly_hash_ad_internal Lopen_128_store: cmp x2, #64 b.lt Lopen_128_store_64 ld1 {v20.16b - v23.16b}, [x1], #64 mov x11, v20.d[0] mov x12, v20.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 
has the value of 4 at most mov x11, v21.d[0] mov x12, v21.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v22.d[0] mov x12, v22.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most mov x11, v23.d[0] mov x12, v23.d[1] adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most eor v20.16b, v20.16b, v0.16b eor v21.16b, v21.16b, v5.16b eor v22.16b, v22.16b, v10.16b eor v23.16b, v23.16b, v15.16b st1 {v20.16b - v23.16b}, [x0], #64 sub x2, x2, #64 mov v0.16b, v1.16b mov v5.16b, v6.16b mov v10.16b, v11.16b mov v15.16b, v16.16b Lopen_128_store_64: lsr x4, x2, #4 mov x3, x1 Lopen_128_hash_64: cbz x4, Lopen_tail_64_store ldp x11, x12, [x3], 16 adds x8, x8, x11 adcs x9, x9, x12 adc x10, x10, x15 mul x11, x8, x16 // [t2:t1:t0] = [acc2:acc1:acc0] * r0 umulh x12, x8, x16 mul x13, x9, x16 umulh x14, x9, x16 adds x12, x12, x13 mul x13, x10, x16 adc x13, x13, x14 mul x14, x8, x17 // [t3:t2:t1:t0] = [acc2:acc1:acc0] * [r1:r0] umulh x8, x8, x17 adds x12, x12, x14 mul x14, x9, x17 umulh x9, x9, x17 adcs x14, x14, x8 mul x10, x10, x17 adc x10, x10, x9 adds x13, x13, x14 adc x14, x10, xzr and x10, x13, #3 // At this point acc2 is 2 bits at most (value of 3) and x8, x13, #-4 extr x13, x14, x13, #2 adds x8, x8, x11 lsr x11, x14, #2 adc x9, x14, x11 // No carry out since t0 is 61 bits and t3 is 63 bits adds x8, x8, x13 adcs x9, x9, x12 adc x10, x10, xzr // At this point acc2 has the value of 4 at most sub x4, x4, #1 b 
Lopen_128_hash_64 .cfi_endproc #endif // !OPENSSL_NO_ASM && defined(OPENSSL_AARCH64) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
7,613
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/test/trampoline-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _abi_test_trampoline .private_extern _abi_test_trampoline .p2align 4 _abi_test_trampoline: _CET_ENDBR subq $120,%rsp movq %r8,48(%rsp) movq %rbx,64(%rsp) movq %rbp,72(%rsp) movq %r12,80(%rsp) movq %r13,88(%rsp) movq %r14,96(%rsp) movq %r15,104(%rsp) movq 0(%rsi),%rbx movq 8(%rsi),%rbp movq 16(%rsi),%r12 movq 24(%rsi),%r13 movq 32(%rsi),%r14 movq 40(%rsi),%r15 movq %rdi,32(%rsp) movq %rsi,40(%rsp) movq %rdx,%r10 movq %rcx,%r11 decq %r11 js L$args_done movq (%r10),%rdi addq $8,%r10 decq %r11 js L$args_done movq (%r10),%rsi addq $8,%r10 decq %r11 js L$args_done movq (%r10),%rdx addq $8,%r10 decq %r11 js L$args_done movq (%r10),%rcx addq $8,%r10 decq %r11 js L$args_done movq (%r10),%r8 addq $8,%r10 decq %r11 js L$args_done movq (%r10),%r9 addq $8,%r10 leaq 0(%rsp),%rax L$args_loop: decq %r11 js L$args_done movq %r11,56(%rsp) movq (%r10),%r11 movq %r11,(%rax) movq 56(%rsp),%r11 addq $8,%r10 addq $8,%rax jmp L$args_loop L$args_done: movq 32(%rsp),%rax movq 48(%rsp),%r10 testq %r10,%r10 jz L$no_unwind pushfq orq $0x100,0(%rsp) popfq nop .globl _abi_test_unwind_start .private_extern _abi_test_unwind_start _abi_test_unwind_start: call *%rax .globl _abi_test_unwind_return .private_extern _abi_test_unwind_return _abi_test_unwind_return: pushfq andq $-0x101,0(%rsp) popfq .globl _abi_test_unwind_stop .private_extern _abi_test_unwind_stop _abi_test_unwind_stop: jmp L$call_done L$no_unwind: call *%rax L$call_done: movq 40(%rsp),%rsi movq %rbx,0(%rsi) movq %rbp,8(%rsi) movq %r12,16(%rsi) movq %r13,24(%rsi) movq %r14,32(%rsi) movq %r15,40(%rsi) movq 64(%rsp),%rbx movq 72(%rsp),%rbp movq 80(%rsp),%r12 movq 88(%rsp),%r13 movq 96(%rsp),%r14 movq 104(%rsp),%r15 addq $120,%rsp .byte 0xf3,0xc3 .globl _abi_test_clobber_rax .private_extern _abi_test_clobber_rax .p2align 4 _abi_test_clobber_rax: _CET_ENDBR xorq %rax,%rax .byte 0xf3,0xc3 .globl _abi_test_clobber_rbx .private_extern _abi_test_clobber_rbx .p2align 4 _abi_test_clobber_rbx: _CET_ENDBR xorq %rbx,%rbx .byte 0xf3,0xc3 .globl _abi_test_clobber_rcx .private_extern _abi_test_clobber_rcx .p2align 4 _abi_test_clobber_rcx: _CET_ENDBR xorq %rcx,%rcx .byte 0xf3,0xc3 .globl _abi_test_clobber_rdx .private_extern _abi_test_clobber_rdx .p2align 4 _abi_test_clobber_rdx: _CET_ENDBR xorq %rdx,%rdx .byte 0xf3,0xc3 .globl _abi_test_clobber_rdi .private_extern _abi_test_clobber_rdi .p2align 4 _abi_test_clobber_rdi: _CET_ENDBR xorq %rdi,%rdi .byte 0xf3,0xc3 .globl _abi_test_clobber_rsi .private_extern _abi_test_clobber_rsi .p2align 4 _abi_test_clobber_rsi: _CET_ENDBR xorq %rsi,%rsi .byte 0xf3,0xc3 .globl _abi_test_clobber_rbp .private_extern _abi_test_clobber_rbp .p2align 4 _abi_test_clobber_rbp: _CET_ENDBR xorq %rbp,%rbp .byte 0xf3,0xc3 .globl _abi_test_clobber_r8 .private_extern _abi_test_clobber_r8 .p2align 4 _abi_test_clobber_r8: _CET_ENDBR xorq %r8,%r8 .byte 0xf3,0xc3 .globl _abi_test_clobber_r9 .private_extern _abi_test_clobber_r9 .p2align 4 _abi_test_clobber_r9: _CET_ENDBR xorq %r9,%r9 .byte 0xf3,0xc3 .globl _abi_test_clobber_r10 .private_extern _abi_test_clobber_r10 .p2align 4 _abi_test_clobber_r10: _CET_ENDBR xorq %r10,%r10 .byte 0xf3,0xc3 .globl _abi_test_clobber_r11 .private_extern _abi_test_clobber_r11 .p2align 4 _abi_test_clobber_r11: _CET_ENDBR xorq %r11,%r11 .byte 0xf3,0xc3 .globl _abi_test_clobber_r12 .private_extern 
_abi_test_clobber_r12 .p2align 4 _abi_test_clobber_r12: _CET_ENDBR xorq %r12,%r12 .byte 0xf3,0xc3 .globl _abi_test_clobber_r13 .private_extern _abi_test_clobber_r13 .p2align 4 _abi_test_clobber_r13: _CET_ENDBR xorq %r13,%r13 .byte 0xf3,0xc3 .globl _abi_test_clobber_r14 .private_extern _abi_test_clobber_r14 .p2align 4 _abi_test_clobber_r14: _CET_ENDBR xorq %r14,%r14 .byte 0xf3,0xc3 .globl _abi_test_clobber_r15 .private_extern _abi_test_clobber_r15 .p2align 4 _abi_test_clobber_r15: _CET_ENDBR xorq %r15,%r15 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm0 .private_extern _abi_test_clobber_xmm0 .p2align 4 _abi_test_clobber_xmm0: _CET_ENDBR pxor %xmm0,%xmm0 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm1 .private_extern _abi_test_clobber_xmm1 .p2align 4 _abi_test_clobber_xmm1: _CET_ENDBR pxor %xmm1,%xmm1 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm2 .private_extern _abi_test_clobber_xmm2 .p2align 4 _abi_test_clobber_xmm2: _CET_ENDBR pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm3 .private_extern _abi_test_clobber_xmm3 .p2align 4 _abi_test_clobber_xmm3: _CET_ENDBR pxor %xmm3,%xmm3 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm4 .private_extern _abi_test_clobber_xmm4 .p2align 4 _abi_test_clobber_xmm4: _CET_ENDBR pxor %xmm4,%xmm4 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm5 .private_extern _abi_test_clobber_xmm5 .p2align 4 _abi_test_clobber_xmm5: _CET_ENDBR pxor %xmm5,%xmm5 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm6 .private_extern _abi_test_clobber_xmm6 .p2align 4 _abi_test_clobber_xmm6: _CET_ENDBR pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm7 .private_extern _abi_test_clobber_xmm7 .p2align 4 _abi_test_clobber_xmm7: _CET_ENDBR pxor %xmm7,%xmm7 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm8 .private_extern _abi_test_clobber_xmm8 .p2align 4 _abi_test_clobber_xmm8: _CET_ENDBR pxor %xmm8,%xmm8 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm9 .private_extern _abi_test_clobber_xmm9 .p2align 4 _abi_test_clobber_xmm9: _CET_ENDBR pxor %xmm9,%xmm9 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm10 .private_extern _abi_test_clobber_xmm10 .p2align 4 _abi_test_clobber_xmm10: _CET_ENDBR pxor %xmm10,%xmm10 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm11 .private_extern _abi_test_clobber_xmm11 .p2align 4 _abi_test_clobber_xmm11: _CET_ENDBR pxor %xmm11,%xmm11 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm12 .private_extern _abi_test_clobber_xmm12 .p2align 4 _abi_test_clobber_xmm12: _CET_ENDBR pxor %xmm12,%xmm12 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm13 .private_extern _abi_test_clobber_xmm13 .p2align 4 _abi_test_clobber_xmm13: _CET_ENDBR pxor %xmm13,%xmm13 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm14 .private_extern _abi_test_clobber_xmm14 .p2align 4 _abi_test_clobber_xmm14: _CET_ENDBR pxor %xmm14,%xmm14 .byte 0xf3,0xc3 .globl _abi_test_clobber_xmm15 .private_extern _abi_test_clobber_xmm15 .p2align 4 _abi_test_clobber_xmm15: _CET_ENDBR pxor %xmm15,%xmm15 .byte 0xf3,0xc3 .globl _abi_test_bad_unwind_wrong_register .private_extern _abi_test_bad_unwind_wrong_register .p2align 4 _abi_test_bad_unwind_wrong_register: _CET_ENDBR pushq %r12 nop popq %r12 .byte 0xf3,0xc3 .globl _abi_test_bad_unwind_temporary .private_extern _abi_test_bad_unwind_temporary .p2align 4 _abi_test_bad_unwind_temporary: _CET_ENDBR pushq %r12 movq %r12,%rax incq %rax movq %rax,(%rsp) movq %r12,(%rsp) popq %r12 .byte 0xf3,0xc3 .globl _abi_test_get_and_clear_direction_flag .private_extern _abi_test_get_and_clear_direction_flag _abi_test_get_and_clear_direction_flag: _CET_ENDBR pushfq popq %rax andq $0x400,%rax shrq $10,%rax 
cld .byte 0xf3,0xc3 .globl _abi_test_set_direction_flag .private_extern _abi_test_set_direction_flag _abi_test_set_direction_flag: _CET_ENDBR std .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
69,274
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/sha256-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _sha256_block_data_order_nohw .private_extern _sha256_block_data_order_nohw .p2align 4 _sha256_block_data_order_nohw: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 shlq $4,%rdx subq $64+32,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %rax,88(%rsp) L$prologue: movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d jmp L$loop .p2align 4 L$loop: movl %ebx,%edi leaq K256(%rip),%rbp xorl %ecx,%edi movl 0(%rsi),%r12d movl %r8d,%r13d movl %eax,%r14d bswapl %r12d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,0(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp addl %r14d,%r11d movl 4(%rsi),%r12d movl %edx,%r13d movl %r11d,%r14d bswapl %r12d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,4(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp addl %r14d,%r10d movl 8(%rsi),%r12d movl %ecx,%r13d movl %r10d,%r14d bswapl %r12d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,8(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp addl %r14d,%r9d movl 12(%rsi),%r12d movl %ebx,%r13d movl %r9d,%r14d bswapl %r12d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,12(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp addl %r14d,%r8d movl 16(%rsi),%r12d movl %eax,%r13d movl %r8d,%r14d bswapl %r12d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,16(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp addl %r14d,%edx movl 20(%rsi),%r12d movl %r11d,%r13d movl %edx,%r14d bswapl %r12d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl 
%r12d,20(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp addl %r14d,%ecx movl 24(%rsi),%r12d movl %r10d,%r13d movl %ecx,%r14d bswapl %r12d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,24(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp addl %r14d,%ebx movl 28(%rsi),%r12d movl %r9d,%r13d movl %ebx,%r14d bswapl %r12d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,28(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp addl %r14d,%eax movl 32(%rsi),%r12d movl %r8d,%r13d movl %eax,%r14d bswapl %r12d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,32(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp addl %r14d,%r11d movl 36(%rsi),%r12d movl %edx,%r13d movl %r11d,%r14d bswapl %r12d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,36(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp addl %r14d,%r10d movl 40(%rsi),%r12d movl %ecx,%r13d movl %r10d,%r14d bswapl %r12d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,40(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp addl %r14d,%r9d movl 44(%rsi),%r12d movl %ebx,%r13d movl %r9d,%r14d bswapl %r12d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,44(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp addl %r14d,%r8d movl 48(%rsi),%r12d movl %eax,%r13d movl %r8d,%r14d bswapl %r12d rorl $14,%r13d movl %ebx,%r15d xorl 
%eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,48(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp addl %r14d,%edx movl 52(%rsi),%r12d movl %r11d,%r13d movl %edx,%r14d bswapl %r12d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,52(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp addl %r14d,%ecx movl 56(%rsi),%r12d movl %r10d,%r13d movl %ecx,%r14d bswapl %r12d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,56(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp addl %r14d,%ebx movl 60(%rsi),%r12d movl %r9d,%r13d movl %ebx,%r14d bswapl %r12d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,60(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp jmp L$rounds_16_xx .p2align 4 L$rounds_16_xx: movl 4(%rsp),%r13d movl 56(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%eax movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 36(%rsp),%r12d addl 0(%rsp),%r12d movl %r8d,%r13d addl %r15d,%r12d movl %eax,%r14d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,0(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp movl 8(%rsp),%r13d movl 60(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r11d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 40(%rsp),%r12d addl 4(%rsp),%r12d movl %edx,%r13d addl %edi,%r12d movl %r11d,%r14d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,4(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp movl 12(%rsp),%r13d movl 0(%rsp),%r15d movl 
%r13d,%r12d rorl $11,%r13d addl %r14d,%r10d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 44(%rsp),%r12d addl 8(%rsp),%r12d movl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r14d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,8(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp movl 16(%rsp),%r13d movl 4(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r9d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 48(%rsp),%r12d addl 12(%rsp),%r12d movl %ebx,%r13d addl %edi,%r12d movl %r9d,%r14d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,12(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp movl 20(%rsp),%r13d movl 8(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r8d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 52(%rsp),%r12d addl 16(%rsp),%r12d movl %eax,%r13d addl %r15d,%r12d movl %r8d,%r14d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,16(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp movl 24(%rsp),%r13d movl 12(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%edx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 56(%rsp),%r12d addl 20(%rsp),%r12d movl %r11d,%r13d addl %edi,%r12d movl %edx,%r14d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,20(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp movl 28(%rsp),%r13d movl 16(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ecx movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 60(%rsp),%r12d addl 24(%rsp),%r12d movl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r14d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,24(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl 
%r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp movl 32(%rsp),%r13d movl 20(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ebx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 0(%rsp),%r12d addl 28(%rsp),%r12d movl %r9d,%r13d addl %edi,%r12d movl %ebx,%r14d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,28(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp movl 36(%rsp),%r13d movl 24(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%eax movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 4(%rsp),%r12d addl 32(%rsp),%r12d movl %r8d,%r13d addl %r15d,%r12d movl %eax,%r14d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,32(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp movl 40(%rsp),%r13d movl 28(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r11d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 8(%rsp),%r12d addl 36(%rsp),%r12d movl %edx,%r13d addl %edi,%r12d movl %r11d,%r14d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,36(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp movl 44(%rsp),%r13d movl 32(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r10d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 12(%rsp),%r12d addl 40(%rsp),%r12d movl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r14d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,40(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp movl 48(%rsp),%r13d movl 36(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r9d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 16(%rsp),%r12d addl 44(%rsp),%r12d 
movl %ebx,%r13d addl %edi,%r12d movl %r9d,%r14d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,44(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp movl 52(%rsp),%r13d movl 40(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r8d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 20(%rsp),%r12d addl 48(%rsp),%r12d movl %eax,%r13d addl %r15d,%r12d movl %r8d,%r14d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,48(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp movl 56(%rsp),%r13d movl 44(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%edx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 24(%rsp),%r12d addl 52(%rsp),%r12d movl %r11d,%r13d addl %edi,%r12d movl %edx,%r14d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,52(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp movl 60(%rsp),%r13d movl 48(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ecx movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 28(%rsp),%r12d addl 56(%rsp),%r12d movl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r14d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,56(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp movl 0(%rsp),%r13d movl 52(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ebx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 32(%rsp),%r12d addl 60(%rsp),%r12d movl %r9d,%r13d addl %edi,%r12d movl %ebx,%r14d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,60(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp cmpb $0,3(%rbp) jnz L$rounds_16_xx 
movq 64+0(%rsp),%rdi addl %r14d,%eax leaq 64(%rsi),%rsi addl 0(%rdi),%eax addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb L$loop movq 88(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 K256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908 .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908 .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl _sha256_block_data_order_hw .private_extern _sha256_block_data_order_hw .p2align 6 _sha256_block_data_order_hw: #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+6(%rip) #endif _CET_ENDBR leaq K256+128(%rip),%rcx movdqu (%rdi),%xmm1 movdqu 16(%rdi),%xmm2 movdqa 512-128(%rcx),%xmm7 pshufd $0x1b,%xmm1,%xmm0 pshufd $0xb1,%xmm1,%xmm1 pshufd $0x1b,%xmm2,%xmm2 movdqa %xmm7,%xmm8 .byte 102,15,58,15,202,8 punpcklqdq %xmm0,%xmm2 jmp L$oop_shaext .p2align 4 L$oop_shaext: movdqu (%rsi),%xmm3 movdqu 16(%rsi),%xmm4 movdqu 32(%rsi),%xmm5 .byte 102,15,56,0,223 movdqu 48(%rsi),%xmm6 movdqa 0-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 102,15,56,0,231 movdqa %xmm2,%xmm10 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 nop movdqa %xmm1,%xmm9 .byte 15,56,203,202 movdqa 32-128(%rcx),%xmm0 
paddd %xmm4,%xmm0 .byte 102,15,56,0,239 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 leaq 64(%rsi),%rsi .byte 15,56,204,220 .byte 15,56,203,202 movdqa 64-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 102,15,56,0,247 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 96-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 128-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 160-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 nop paddd %xmm7,%xmm6 .byte 15,56,204,220 .byte 15,56,203,202 movdqa 192-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,205,245 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 224-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 256-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 288-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 nop paddd %xmm7,%xmm6 .byte 15,56,204,220 .byte 15,56,203,202 movdqa 320-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,205,245 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 352-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 384-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 416-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 .byte 15,56,203,202 paddd %xmm7,%xmm6 movdqa 448-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 .byte 15,56,205,245 movdqa %xmm8,%xmm7 .byte 15,56,203,202 movdqa 480-128(%rcx),%xmm0 paddd %xmm6,%xmm0 nop .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 decq %rdx nop .byte 15,56,203,202 paddd %xmm10,%xmm2 paddd %xmm9,%xmm1 jnz L$oop_shaext pshufd $0xb1,%xmm2,%xmm2 pshufd $0x1b,%xmm1,%xmm7 pshufd $0xb1,%xmm1,%xmm1 punpckhqdq %xmm2,%xmm1 .byte 102,15,58,15,215,8 movdqu %xmm1,(%rdi) movdqu %xmm2,16(%rdi) .byte 0xf3,0xc3 .globl _sha256_block_data_order_ssse3 .private_extern _sha256_block_data_order_ssse3 .p2align 6 _sha256_block_data_order_ssse3: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 shlq $4,%rdx subq $96,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) 
movq %rdx,64+16(%rsp) movq %rax,88(%rsp) L$prologue_ssse3: movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d jmp L$loop_ssse3 .p2align 4 L$loop_ssse3: movdqa K256+512(%rip),%xmm7 movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 .byte 102,15,56,0,199 movdqu 48(%rsi),%xmm3 leaq K256(%rip),%rbp .byte 102,15,56,0,207 movdqa 0(%rbp),%xmm4 movdqa 32(%rbp),%xmm5 .byte 102,15,56,0,215 paddd %xmm0,%xmm4 movdqa 64(%rbp),%xmm6 .byte 102,15,56,0,223 movdqa 96(%rbp),%xmm7 paddd %xmm1,%xmm5 paddd %xmm2,%xmm6 paddd %xmm3,%xmm7 movdqa %xmm4,0(%rsp) movl %eax,%r14d movdqa %xmm5,16(%rsp) movl %ebx,%edi movdqa %xmm6,32(%rsp) xorl %ecx,%edi movdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp L$ssse3_00_47 .p2align 4 L$ssse3_00_47: subq $-128,%rbp rorl $14,%r13d movdqa %xmm1,%xmm4 movl %r14d,%eax movl %r9d,%r12d movdqa %xmm3,%xmm7 rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d .byte 102,15,58,15,224,4 andl %r8d,%r12d xorl %r8d,%r13d .byte 102,15,58,15,250,4 addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %ebx,%r15d addl %r12d,%r11d movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi paddd %xmm7,%xmm0 rorl $2,%r14d addl %r11d,%edx psrld $7,%xmm6 addl %edi,%r11d movl %edx,%r13d pshufd $250,%xmm3,%xmm7 addl %r11d,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%r11d movl %r8d,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %r11d,%r14d pxor %xmm5,%xmm4 andl %edx,%r12d xorl %edx,%r13d pslld $11,%xmm5 addl 4(%rsp),%r10d movl %r11d,%edi pxor %xmm6,%xmm4 xorl %r9d,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %eax,%edi addl %r12d,%r10d pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d psrld $10,%xmm7 addl %r13d,%r10d xorl %eax,%r15d paddd %xmm4,%xmm0 rorl $2,%r14d addl %r10d,%ecx psrlq $17,%xmm6 addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %ecx,%r13d xorl %r8d,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d pshufd $128,%xmm7,%xmm7 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d psrldq $8,%xmm7 xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d paddd %xmm7,%xmm0 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d pshufd $80,%xmm0,%xmm7 xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx movdqa %xmm7,%xmm6 addl %edi,%r9d movl %ebx,%r13d psrld $10,%xmm7 addl %r9d,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%r9d movl %ecx,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d psrlq $2,%xmm6 andl %ebx,%r12d xorl %ebx,%r13d addl 12(%rsp),%r8d pxor %xmm6,%xmm7 movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %r10d,%edi addl %r12d,%r8d movdqa 0(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d paddd %xmm7,%xmm0 rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d paddd %xmm0,%xmm6 movl %eax,%r13d addl %r8d,%r14d movdqa %xmm6,0(%rsp) rorl $14,%r13d movdqa %xmm2,%xmm4 movl %r14d,%r8d movl %ebx,%r12d movdqa %xmm0,%xmm7 rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d .byte 102,15,58,15,225,4 andl %eax,%r12d xorl %eax,%r13d .byte 102,15,58,15,251,4 addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl 
%r9d,%r15d addl %r12d,%edx movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi paddd %xmm7,%xmm1 rorl $2,%r14d addl %edx,%r11d psrld $7,%xmm6 addl %edi,%edx movl %r11d,%r13d pshufd $250,%xmm0,%xmm7 addl %edx,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%edx movl %eax,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %edx,%r14d pxor %xmm5,%xmm4 andl %r11d,%r12d xorl %r11d,%r13d pslld $11,%xmm5 addl 20(%rsp),%ecx movl %edx,%edi pxor %xmm6,%xmm4 xorl %ebx,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %r8d,%edi addl %r12d,%ecx pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d psrld $10,%xmm7 addl %r13d,%ecx xorl %r8d,%r15d paddd %xmm4,%xmm1 rorl $2,%r14d addl %ecx,%r10d psrlq $17,%xmm6 addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %r10d,%r13d xorl %eax,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d pshufd $128,%xmm7,%xmm7 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d psrldq $8,%xmm7 xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d paddd %xmm7,%xmm1 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx pshufd $80,%xmm1,%xmm7 xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d movdqa %xmm7,%xmm6 addl %edi,%ebx movl %r9d,%r13d psrld $10,%xmm7 addl %ebx,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%ebx movl %r10d,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d psrlq $2,%xmm6 andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax pxor %xmm6,%xmm7 movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %ecx,%edi addl %r12d,%eax movdqa 32(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d paddd %xmm7,%xmm1 rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax paddd %xmm1,%xmm6 movl %r8d,%r13d addl %eax,%r14d movdqa %xmm6,16(%rsp) rorl $14,%r13d movdqa %xmm3,%xmm4 movl %r14d,%eax movl %r9d,%r12d movdqa %xmm1,%xmm7 rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d .byte 102,15,58,15,226,4 andl %r8d,%r12d xorl %r8d,%r13d .byte 102,15,58,15,248,4 addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %ebx,%r15d addl %r12d,%r11d movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi paddd %xmm7,%xmm2 rorl $2,%r14d addl %r11d,%edx psrld $7,%xmm6 addl %edi,%r11d movl %edx,%r13d pshufd $250,%xmm1,%xmm7 addl %r11d,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%r11d movl %r8d,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %r11d,%r14d pxor %xmm5,%xmm4 andl %edx,%r12d xorl %edx,%r13d pslld $11,%xmm5 addl 36(%rsp),%r10d movl %r11d,%edi pxor %xmm6,%xmm4 xorl %r9d,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %eax,%edi addl %r12d,%r10d pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d psrld $10,%xmm7 addl %r13d,%r10d xorl %eax,%r15d paddd %xmm4,%xmm2 rorl $2,%r14d addl %r10d,%ecx psrlq $17,%xmm6 addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %ecx,%r13d xorl %r8d,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d pshufd $128,%xmm7,%xmm7 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d psrldq $8,%xmm7 xorl %r8d,%r12d rorl 
$11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d paddd %xmm7,%xmm2 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d pshufd $80,%xmm2,%xmm7 xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx movdqa %xmm7,%xmm6 addl %edi,%r9d movl %ebx,%r13d psrld $10,%xmm7 addl %r9d,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%r9d movl %ecx,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d psrlq $2,%xmm6 andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d pxor %xmm6,%xmm7 movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %r10d,%edi addl %r12d,%r8d movdqa 64(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d paddd %xmm7,%xmm2 rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d paddd %xmm2,%xmm6 movl %eax,%r13d addl %r8d,%r14d movdqa %xmm6,32(%rsp) rorl $14,%r13d movdqa %xmm0,%xmm4 movl %r14d,%r8d movl %ebx,%r12d movdqa %xmm2,%xmm7 rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d .byte 102,15,58,15,227,4 andl %eax,%r12d xorl %eax,%r13d .byte 102,15,58,15,249,4 addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %r9d,%r15d addl %r12d,%edx movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi paddd %xmm7,%xmm3 rorl $2,%r14d addl %edx,%r11d psrld $7,%xmm6 addl %edi,%edx movl %r11d,%r13d pshufd $250,%xmm2,%xmm7 addl %edx,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%edx movl %eax,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %edx,%r14d pxor %xmm5,%xmm4 andl %r11d,%r12d xorl %r11d,%r13d pslld $11,%xmm5 addl 52(%rsp),%ecx movl %edx,%edi pxor %xmm6,%xmm4 xorl %ebx,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %r8d,%edi addl %r12d,%ecx pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d psrld $10,%xmm7 addl %r13d,%ecx xorl %r8d,%r15d paddd %xmm4,%xmm3 rorl $2,%r14d addl %ecx,%r10d psrlq $17,%xmm6 addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %r10d,%r13d xorl %eax,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d pshufd $128,%xmm7,%xmm7 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d psrldq $8,%xmm7 xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d paddd %xmm7,%xmm3 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx pshufd $80,%xmm3,%xmm7 xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d movdqa %xmm7,%xmm6 addl %edi,%ebx movl %r9d,%r13d psrld $10,%xmm7 addl %ebx,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%ebx movl %r10d,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d psrlq $2,%xmm6 andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax pxor %xmm6,%xmm7 movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %ecx,%edi addl %r12d,%eax movdqa 96(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d paddd %xmm7,%xmm3 rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax paddd %xmm3,%xmm6 movl %r8d,%r13d addl %eax,%r14d movdqa %xmm6,48(%rsp) cmpb $0,131(%rbp) jne L$ssse3_00_47 rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d xorl %ebx,%r15d addl %r12d,%r11d rorl $6,%r13d andl 
%r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi rorl $2,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d rorl $11,%r14d xorl %eax,%edi addl %r12d,%r10d rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d rorl $2,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d xorl %ecx,%r13d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d xorl %r10d,%edi addl %r12d,%r8d rorl $6,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d xorl %r9d,%r15d addl %r12d,%edx rorl $6,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi rorl $2,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d rorl $11,%r14d xorl %r8d,%edi addl %r12d,%ecx rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d rorl $2,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d xorl %r10d,%r13d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d xorl %ecx,%edi addl %r12d,%eax rorl $6,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d xorl %ebx,%r15d addl %r12d,%r11d rorl $6,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi rorl $2,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl 
%edx,%r12d xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d rorl $11,%r14d xorl %eax,%edi addl %r12d,%r10d rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d rorl $2,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d xorl %ecx,%r13d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d xorl %r10d,%edi addl %r12d,%r8d rorl $6,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d xorl %r9d,%r15d addl %r12d,%edx rorl $6,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi rorl $2,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d rorl $11,%r14d xorl %r8d,%edi addl %r12d,%ecx rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d rorl $2,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d xorl %r10d,%r13d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d xorl %ecx,%edi addl %r12d,%eax rorl $6,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%rdi movl %r14d,%eax addl 0(%rdi),%eax leaq 64(%rsi),%rsi addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb L$loop_ssse3 movq 88(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_ssse3: .byte 0xf3,0xc3 .globl _sha256_block_data_order_avx .private_extern _sha256_block_data_order_avx .p2align 6 _sha256_block_data_order_avx: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 shlq 
$4,%rdx subq $96,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %rax,88(%rsp) L$prologue_avx: vzeroupper movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d vmovdqa K256+512+32(%rip),%xmm8 vmovdqa K256+512+64(%rip),%xmm9 jmp L$loop_avx .p2align 4 L$loop_avx: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi),%xmm0 vmovdqu 16(%rsi),%xmm1 vmovdqu 32(%rsi),%xmm2 vmovdqu 48(%rsi),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%edi vmovdqa %xmm6,32(%rsp) xorl %ecx,%edi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp L$avx_00_47 .p2align 4 L$avx_00_47: subq $-128,%rbp vpalignr $4,%xmm0,%xmm1,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm2,%xmm3,%xmm7 shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm0,%xmm0 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi vpshufd $250,%xmm3,%xmm7 shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm0,%xmm0 addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpaddd %xmm6,%xmm0,%xmm0 andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d vpshufd $80,%xmm0,%xmm7 movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d vpxor %xmm7,%xmm6,%xmm6 xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx vpsrlq $2,%xmm7,%xmm7 addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d vpaddd %xmm6,%xmm0,%xmm0 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpaddd 0(%rbp),%xmm0,%xmm6 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) 
vpalignr $4,%xmm1,%xmm2,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm3,%xmm0,%xmm7 shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm1,%xmm1 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi vpshufd $250,%xmm0,%xmm7 shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm1,%xmm1 addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpaddd %xmm6,%xmm1,%xmm1 andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx vpshufd $80,%xmm1,%xmm7 movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx vpxor %xmm7,%xmm6,%xmm6 xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d vpsrlq $2,%xmm7,%xmm7 addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d vpaddd %xmm6,%xmm1,%xmm1 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpaddd 32(%rbp),%xmm1,%xmm6 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm0,%xmm1,%xmm7 shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm2,%xmm2 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi vpshufd $250,%xmm1,%xmm7 shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 36(%rsp),%r10d movl %r11d,%edi xorl 
%r9d,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm2,%xmm2 addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpaddd %xmm6,%xmm2,%xmm2 andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d vpshufd $80,%xmm2,%xmm7 movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d vpxor %xmm7,%xmm6,%xmm6 xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx vpsrlq $2,%xmm7,%xmm7 addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d vpaddd %xmm6,%xmm2,%xmm2 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpaddd 64(%rbp),%xmm2,%xmm6 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm1,%xmm2,%xmm7 shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm3,%xmm3 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi vpshufd $250,%xmm2,%xmm7 shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm3,%xmm3 addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpaddd %xmm6,%xmm3,%xmm3 andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx vpshufd $80,%xmm3,%xmm7 movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx vpxor %xmm7,%xmm6,%xmm6 xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d vpsrlq $2,%xmm7,%xmm7 addl %edi,%ebx movl %r9d,%r13d addl 
%ebx,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d vpaddd %xmm6,%xmm3,%xmm3 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpaddd 96(%rbp),%xmm3,%xmm6 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) cmpb $0,131(%rbp) jne L$avx_00_47 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d 
xorl %eax,%r12d shrdl $11,%r14d,%r14d xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d 
andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%rdi movl %r14d,%eax addl 0(%rdi),%eax leaq 64(%rsi),%rsi addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb L$loop_avx movq 88(%rsp),%rsi vzeroupper movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_avx: .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
22,732
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/ghash-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _gcm_init_clmul .private_extern _gcm_init_clmul .p2align 4 _gcm_init_clmul: _CET_ENDBR L$_init_clmul: movdqu (%rsi),%xmm2 pshufd $78,%xmm2,%xmm2 pshufd $255,%xmm2,%xmm4 movdqa %xmm2,%xmm3 psllq $1,%xmm2 pxor %xmm5,%xmm5 psrlq $63,%xmm3 pcmpgtd %xmm4,%xmm5 pslldq $8,%xmm3 por %xmm3,%xmm2 pand L$0x1c2_polynomial(%rip),%xmm5 pxor %xmm5,%xmm2 pshufd $78,%xmm2,%xmm6 movdqa %xmm2,%xmm0 pxor %xmm2,%xmm6 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 pshufd $78,%xmm2,%xmm3 pshufd $78,%xmm0,%xmm4 pxor %xmm2,%xmm3 movdqu %xmm2,0(%rdi) pxor %xmm0,%xmm4 movdqu %xmm0,16(%rdi) .byte 102,15,58,15,227,8 movdqu %xmm4,32(%rdi) movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 movdqa %xmm0,%xmm5 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 pshufd $78,%xmm5,%xmm3 pshufd $78,%xmm0,%xmm4 pxor %xmm5,%xmm3 movdqu %xmm5,48(%rdi) pxor %xmm0,%xmm4 movdqu %xmm0,64(%rdi) .byte 102,15,58,15,227,8 movdqu %xmm4,80(%rdi) .byte 0xf3,0xc3 .globl _gcm_gmult_clmul .private_extern _gcm_gmult_clmul .p2align 4 _gcm_gmult_clmul: _CET_ENDBR L$_gmult_clmul: movdqu (%rdi),%xmm0 movdqa L$bswap_mask(%rip),%xmm5 movdqu (%rsi),%xmm2 movdqu 32(%rsi),%xmm4 .byte 102,15,56,0,197 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,220,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor 
%xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 .byte 102,15,56,0,197 movdqu %xmm0,(%rdi) .byte 0xf3,0xc3 .globl _gcm_ghash_clmul .private_extern _gcm_ghash_clmul .p2align 5 _gcm_ghash_clmul: _CET_ENDBR L$_ghash_clmul: movdqa L$bswap_mask(%rip),%xmm10 movdqu (%rdi),%xmm0 movdqu (%rsi),%xmm2 movdqu 32(%rsi),%xmm7 .byte 102,65,15,56,0,194 subq $0x10,%rcx jz L$odd_tail movdqu 16(%rsi),%xmm6 cmpq $0x30,%rcx jb L$skip4x subq $0x30,%rcx movq $0xA040608020C0E000,%rax movdqu 48(%rsi),%xmm14 movdqu 64(%rsi),%xmm15 movdqu 48(%rdx),%xmm3 movdqu 32(%rdx),%xmm11 .byte 102,65,15,56,0,218 .byte 102,69,15,56,0,218 movdqa %xmm3,%xmm5 pshufd $78,%xmm3,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,68,218,0 .byte 102,15,58,68,234,17 .byte 102,15,58,68,231,0 movdqa %xmm11,%xmm13 pshufd $78,%xmm11,%xmm12 pxor %xmm11,%xmm12 .byte 102,68,15,58,68,222,0 .byte 102,68,15,58,68,238,17 .byte 102,68,15,58,68,231,16 xorps %xmm11,%xmm3 xorps %xmm13,%xmm5 movups 80(%rsi),%xmm7 xorps %xmm12,%xmm4 movdqu 16(%rdx),%xmm11 movdqu 0(%rdx),%xmm8 .byte 102,69,15,56,0,218 .byte 102,69,15,56,0,194 movdqa %xmm11,%xmm13 pshufd $78,%xmm11,%xmm12 pxor %xmm8,%xmm0 pxor %xmm11,%xmm12 .byte 102,69,15,58,68,222,0 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm8 pxor %xmm0,%xmm8 .byte 102,69,15,58,68,238,17 .byte 102,68,15,58,68,231,0 xorps %xmm11,%xmm3 xorps %xmm13,%xmm5 leaq 64(%rdx),%rdx subq $0x40,%rcx jc L$tail4x jmp L$mod4_loop .p2align 5 L$mod4_loop: .byte 102,65,15,58,68,199,0 xorps %xmm12,%xmm4 movdqu 48(%rdx),%xmm11 .byte 102,69,15,56,0,218 .byte 102,65,15,58,68,207,17 xorps %xmm3,%xmm0 movdqu 32(%rdx),%xmm3 movdqa %xmm11,%xmm13 .byte 102,68,15,58,68,199,16 pshufd $78,%xmm11,%xmm12 xorps %xmm5,%xmm1 pxor %xmm11,%xmm12 .byte 102,65,15,56,0,218 movups 32(%rsi),%xmm7 xorps %xmm4,%xmm8 .byte 102,68,15,58,68,218,0 pshufd $78,%xmm3,%xmm4 pxor %xmm0,%xmm8 movdqa %xmm3,%xmm5 pxor %xmm1,%xmm8 pxor %xmm3,%xmm4 movdqa %xmm8,%xmm9 .byte 102,68,15,58,68,234,17 pslldq $8,%xmm8 psrldq $8,%xmm9 pxor %xmm8,%xmm0 movdqa L$7_mask(%rip),%xmm8 pxor %xmm9,%xmm1 .byte 102,76,15,110,200 pand %xmm0,%xmm8 .byte 102,69,15,56,0,200 pxor %xmm0,%xmm9 .byte 102,68,15,58,68,231,0 psllq $57,%xmm9 movdqa %xmm9,%xmm8 pslldq $8,%xmm9 .byte 102,15,58,68,222,0 psrldq $8,%xmm8 pxor %xmm9,%xmm0 pxor %xmm8,%xmm1 movdqu 0(%rdx),%xmm8 movdqa %xmm0,%xmm9 psrlq $1,%xmm0 .byte 102,15,58,68,238,17 xorps %xmm11,%xmm3 movdqu 16(%rdx),%xmm11 .byte 102,69,15,56,0,218 .byte 102,15,58,68,231,16 xorps %xmm13,%xmm5 movups 80(%rsi),%xmm7 .byte 102,69,15,56,0,194 pxor %xmm9,%xmm1 pxor %xmm0,%xmm9 psrlq $5,%xmm0 movdqa %xmm11,%xmm13 pxor %xmm12,%xmm4 pshufd $78,%xmm11,%xmm12 pxor %xmm9,%xmm0 pxor %xmm8,%xmm1 pxor %xmm11,%xmm12 .byte 102,69,15,58,68,222,0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 movdqa %xmm0,%xmm1 .byte 102,69,15,58,68,238,17 xorps %xmm11,%xmm3 pshufd $78,%xmm0,%xmm8 pxor %xmm0,%xmm8 .byte 102,68,15,58,68,231,0 xorps %xmm13,%xmm5 leaq 64(%rdx),%rdx subq $0x40,%rcx jnc L$mod4_loop L$tail4x: .byte 102,65,15,58,68,199,0 .byte 102,65,15,58,68,207,17 .byte 102,68,15,58,68,199,16 xorps %xmm12,%xmm4 xorps %xmm3,%xmm0 xorps %xmm5,%xmm1 pxor %xmm0,%xmm1 pxor %xmm4,%xmm8 pxor %xmm1,%xmm8 pxor %xmm0,%xmm1 movdqa %xmm8,%xmm9 psrldq $8,%xmm8 pslldq $8,%xmm9 pxor %xmm8,%xmm1 pxor %xmm9,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor 
%xmm1,%xmm0 addq $0x40,%rcx jz L$done movdqu 32(%rsi),%xmm7 subq $0x10,%rcx jz L$odd_tail L$skip4x: movdqu (%rdx),%xmm8 movdqu 16(%rdx),%xmm3 .byte 102,69,15,56,0,194 .byte 102,65,15,56,0,218 pxor %xmm8,%xmm0 movdqa %xmm3,%xmm5 pshufd $78,%xmm3,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,68,218,0 .byte 102,15,58,68,234,17 .byte 102,15,58,68,231,0 leaq 32(%rdx),%rdx nop subq $0x20,%rcx jbe L$even_tail nop jmp L$mod_loop .p2align 5 L$mod_loop: movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm8 pshufd $78,%xmm0,%xmm4 pxor %xmm0,%xmm4 .byte 102,15,58,68,198,0 .byte 102,15,58,68,206,17 .byte 102,15,58,68,231,16 pxor %xmm3,%xmm0 pxor %xmm5,%xmm1 movdqu (%rdx),%xmm9 pxor %xmm0,%xmm8 .byte 102,69,15,56,0,202 movdqu 16(%rdx),%xmm3 pxor %xmm1,%xmm8 pxor %xmm9,%xmm1 pxor %xmm8,%xmm4 .byte 102,65,15,56,0,218 movdqa %xmm4,%xmm8 psrldq $8,%xmm8 pslldq $8,%xmm4 pxor %xmm8,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm3,%xmm5 movdqa %xmm0,%xmm9 movdqa %xmm0,%xmm8 psllq $5,%xmm0 pxor %xmm0,%xmm8 .byte 102,15,58,68,218,0 psllq $1,%xmm0 pxor %xmm8,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm8 pslldq $8,%xmm0 psrldq $8,%xmm8 pxor %xmm9,%xmm0 pshufd $78,%xmm5,%xmm4 pxor %xmm8,%xmm1 pxor %xmm5,%xmm4 movdqa %xmm0,%xmm9 psrlq $1,%xmm0 .byte 102,15,58,68,234,17 pxor %xmm9,%xmm1 pxor %xmm0,%xmm9 psrlq $5,%xmm0 pxor %xmm9,%xmm0 leaq 32(%rdx),%rdx psrlq $1,%xmm0 .byte 102,15,58,68,231,0 pxor %xmm1,%xmm0 subq $0x20,%rcx ja L$mod_loop L$even_tail: movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm8 pshufd $78,%xmm0,%xmm4 pxor %xmm0,%xmm4 .byte 102,15,58,68,198,0 .byte 102,15,58,68,206,17 .byte 102,15,58,68,231,16 pxor %xmm3,%xmm0 pxor %xmm5,%xmm1 pxor %xmm0,%xmm8 pxor %xmm1,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm8 psrldq $8,%xmm8 pslldq $8,%xmm4 pxor %xmm8,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 testq %rcx,%rcx jnz L$done L$odd_tail: movdqu (%rdx),%xmm8 .byte 102,69,15,56,0,194 pxor %xmm8,%xmm0 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,223,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 L$done: .byte 102,65,15,56,0,194 movdqu %xmm0,(%rdi) .byte 0xf3,0xc3 .globl _gcm_init_avx .private_extern _gcm_init_avx .p2align 5 _gcm_init_avx: _CET_ENDBR vzeroupper vmovdqu (%rsi),%xmm2 vpshufd $78,%xmm2,%xmm2 vpshufd $255,%xmm2,%xmm4 vpsrlq $63,%xmm2,%xmm3 vpsllq $1,%xmm2,%xmm2 vpxor %xmm5,%xmm5,%xmm5 vpcmpgtd %xmm4,%xmm5,%xmm5 vpslldq $8,%xmm3,%xmm3 vpor %xmm3,%xmm2,%xmm2 vpand L$0x1c2_polynomial(%rip),%xmm5,%xmm5 vpxor %xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm2,%xmm2,%xmm6 vmovdqa %xmm2,%xmm0 vpxor %xmm2,%xmm6,%xmm6 movq $4,%r10 jmp L$init_start_avx .p2align 5 L$init_loop_avx: vpalignr $8,%xmm3,%xmm4,%xmm5 vmovdqu %xmm5,-16(%rdi) vpunpckhqdq %xmm0,%xmm0,%xmm3 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3 
vpxor %xmm0,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm0,%xmm0 vpxor %xmm3,%xmm1,%xmm1 vpsllq $57,%xmm0,%xmm3 vpsllq $62,%xmm0,%xmm4 vpxor %xmm3,%xmm4,%xmm4 vpsllq $63,%xmm0,%xmm3 vpxor %xmm3,%xmm4,%xmm4 vpslldq $8,%xmm4,%xmm3 vpsrldq $8,%xmm4,%xmm4 vpxor %xmm3,%xmm0,%xmm0 vpxor %xmm4,%xmm1,%xmm1 vpsrlq $1,%xmm0,%xmm4 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $5,%xmm4,%xmm4 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $1,%xmm0,%xmm0 vpxor %xmm1,%xmm0,%xmm0 L$init_start_avx: vmovdqa %xmm0,%xmm5 vpunpckhqdq %xmm0,%xmm0,%xmm3 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3 vpxor %xmm0,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm0,%xmm0 vpxor %xmm3,%xmm1,%xmm1 vpsllq $57,%xmm0,%xmm3 vpsllq $62,%xmm0,%xmm4 vpxor %xmm3,%xmm4,%xmm4 vpsllq $63,%xmm0,%xmm3 vpxor %xmm3,%xmm4,%xmm4 vpslldq $8,%xmm4,%xmm3 vpsrldq $8,%xmm4,%xmm4 vpxor %xmm3,%xmm0,%xmm0 vpxor %xmm4,%xmm1,%xmm1 vpsrlq $1,%xmm0,%xmm4 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $5,%xmm4,%xmm4 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $1,%xmm0,%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpshufd $78,%xmm5,%xmm3 vpshufd $78,%xmm0,%xmm4 vpxor %xmm5,%xmm3,%xmm3 vmovdqu %xmm5,0(%rdi) vpxor %xmm0,%xmm4,%xmm4 vmovdqu %xmm0,16(%rdi) leaq 48(%rdi),%rdi subq $1,%r10 jnz L$init_loop_avx vpalignr $8,%xmm4,%xmm3,%xmm5 vmovdqu %xmm5,-16(%rdi) vzeroupper .byte 0xf3,0xc3 .globl _gcm_gmult_avx .private_extern _gcm_gmult_avx .p2align 5 _gcm_gmult_avx: _CET_ENDBR jmp L$_gmult_clmul .globl _gcm_ghash_avx .private_extern _gcm_ghash_avx .p2align 5 _gcm_ghash_avx: _CET_ENDBR vzeroupper vmovdqu (%rdi),%xmm10 leaq L$0x1c2_polynomial(%rip),%r10 leaq 64(%rsi),%rsi vmovdqu L$bswap_mask(%rip),%xmm13 vpshufb %xmm13,%xmm10,%xmm10 cmpq $0x80,%rcx jb L$short_avx subq $0x80,%rcx vmovdqu 112(%rdx),%xmm14 vmovdqu 0-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm14 vmovdqu 32-64(%rsi),%xmm7 vpunpckhqdq %xmm14,%xmm14,%xmm9 vmovdqu 96(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm14,%xmm9,%xmm9 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 16-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vmovdqu 80(%rdx),%xmm14 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 48-64(%rsi),%xmm6 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 64(%rdx),%xmm15 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 80-64(%rsi),%xmm7 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vmovdqu 48(%rdx),%xmm14 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm4,%xmm1,%xmm1 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 96-64(%rsi),%xmm6 vpxor %xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 128-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 32(%rdx),%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vmovdqu 
16(%rdx),%xmm14 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm4,%xmm1,%xmm1 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 144-64(%rsi),%xmm6 vpxor %xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 176-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu (%rdx),%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 160-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2 leaq 128(%rdx),%rdx cmpq $0x80,%rcx jb L$tail_avx vpxor %xmm10,%xmm15,%xmm15 subq $0x80,%rcx jmp L$oop8x_avx .p2align 5 L$oop8x_avx: vpunpckhqdq %xmm15,%xmm15,%xmm8 vmovdqu 112(%rdx),%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm15,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm10 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm11 vmovdqu 0-64(%rsi),%xmm6 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm12 vmovdqu 32-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 96(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm3,%xmm10,%xmm10 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vxorps %xmm4,%xmm11,%xmm11 vmovdqu 16-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm5,%xmm12,%xmm12 vxorps %xmm15,%xmm8,%xmm8 vmovdqu 80(%rdx),%xmm14 vpxor %xmm10,%xmm12,%xmm12 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm11,%xmm12,%xmm12 vpslldq $8,%xmm12,%xmm9 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vpsrldq $8,%xmm12,%xmm12 vpxor %xmm9,%xmm10,%xmm10 vmovdqu 48-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm14 vxorps %xmm12,%xmm11,%xmm11 vpxor %xmm1,%xmm4,%xmm4 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 80-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 64(%rdx),%xmm15 vpalignr $8,%xmm10,%xmm10,%xmm12 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vxorps %xmm15,%xmm8,%xmm8 vpxor %xmm5,%xmm2,%xmm2 vmovdqu 48(%rdx),%xmm14 vpclmulqdq $0x10,(%r10),%xmm10,%xmm10 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 96-64(%rsi),%xmm6 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 128-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 32(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vpxor %xmm5,%xmm2,%xmm2 vxorps %xmm12,%xmm10,%xmm10 vmovdqu 16(%rdx),%xmm14 vpalignr $8,%xmm10,%xmm10,%xmm12 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 144-64(%rsi),%xmm6 vpclmulqdq $0x10,(%r10),%xmm10,%xmm10 vxorps %xmm11,%xmm12,%xmm12 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 176-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu (%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 
vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 160-64(%rsi),%xmm6 vpxor %xmm12,%xmm15,%xmm15 vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2 vpxor %xmm10,%xmm15,%xmm15 leaq 128(%rdx),%rdx subq $0x80,%rcx jnc L$oop8x_avx addq $0x80,%rcx jmp L$tail_no_xor_avx .p2align 5 L$short_avx: vmovdqu -16(%rdx,%rcx,1),%xmm14 leaq (%rdx,%rcx,1),%rdx vmovdqu 0-64(%rsi),%xmm6 vmovdqu 32-64(%rsi),%xmm7 vpshufb %xmm13,%xmm14,%xmm15 vmovdqa %xmm0,%xmm3 vmovdqa %xmm1,%xmm4 vmovdqa %xmm2,%xmm5 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -32(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 16-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -48(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 48-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu 80-64(%rsi),%xmm7 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -64(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -80(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 96-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu 128-64(%rsi),%xmm7 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -96(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz L$tail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -112(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 144-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovq 184-64(%rsi),%xmm7 subq $0x10,%rcx jmp L$tail_avx .p2align 5 L$tail_avx: vpxor %xmm10,%xmm15,%xmm15 L$tail_no_xor_avx: vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu (%r10),%xmm12 vpxor %xmm0,%xmm3,%xmm10 vpxor %xmm1,%xmm4,%xmm11 vpxor %xmm2,%xmm5,%xmm5 vpxor %xmm10,%xmm5,%xmm5 vpxor %xmm11,%xmm5,%xmm5 vpslldq $8,%xmm5,%xmm9 vpsrldq $8,%xmm5,%xmm5 vpxor %xmm9,%xmm10,%xmm10 vpxor %xmm5,%xmm11,%xmm11 vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9 vpalignr $8,%xmm10,%xmm10,%xmm10 vpxor %xmm9,%xmm10,%xmm10 vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9 vpalignr $8,%xmm10,%xmm10,%xmm10 vpxor %xmm11,%xmm10,%xmm10 vpxor %xmm9,%xmm10,%xmm10 cmpq $0,%rcx jne L$short_avx vpshufb %xmm13,%xmm10,%xmm10 vmovdqu 
%xmm10,(%rdi) vzeroupper .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 L$bswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 L$0x1c2_polynomial: .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 L$7_mask: .long 7,0,7,0 .p2align 6 .byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .p2align 6 .text #endif
marvin-hansen/iggy-streaming-system
3,296,473
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/aesni-gcm-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl _gcm_init_avx512 .private_extern _gcm_init_avx512 .private_extern _gcm_init_avx512 .p2align 5 _gcm_init_avx512: .byte 243,15,30,250 vmovdqu64 (%rsi),%xmm16 vpalignr $8,%xmm16,%xmm16,%xmm16 vmovdqa64 %xmm16,%xmm2 vpsllq $1,%xmm16,%xmm16 vpsrlq $63,%xmm2,%xmm2 vmovdqa %xmm2,%xmm1 vpslldq $8,%xmm2,%xmm2 vpsrldq $8,%xmm1,%xmm1 vporq %xmm2,%xmm16,%xmm16 vpshufd $36,%xmm1,%xmm2 vpcmpeqd TWOONE(%rip),%xmm2,%xmm2 vpand POLY(%rip),%xmm2,%xmm2 vpxorq %xmm2,%xmm16,%xmm16 vmovdqu64 %xmm16,240(%rdi) vshufi32x4 $0x00,%ymm16,%ymm16,%ymm4 vmovdqa %ymm4,%ymm3 .byte 98,243,101,40,68,196,17 .byte 98,243,101,40,68,204,0 .byte 98,243,101,40,68,212,1 .byte 98,243,101,40,68,220,16 vpxorq %ymm2,%ymm3,%ymm3 vpsrldq $8,%ymm3,%ymm2 vpslldq $8,%ymm3,%ymm3 vpxorq %ymm2,%ymm0,%ymm0 vpxorq %ymm1,%ymm3,%ymm3 vmovdqu64 POLY2(%rip),%ymm2 .byte 98,243,109,40,68,203,1 vpslldq $8,%ymm1,%ymm1 vpxorq %ymm1,%ymm3,%ymm3 .byte 98,243,109,40,68,203,0 vpsrldq $4,%ymm1,%ymm1 .byte 98,243,109,40,68,219,16 vpslldq $4,%ymm3,%ymm3 vpternlogq $0x96,%ymm1,%ymm0,%ymm3 vmovdqu64 %xmm3,224(%rdi) vinserti64x2 $1,%xmm16,%ymm3,%ymm4 vmovdqa64 %ymm4,%ymm5 .byte 98,243,93,40,68,195,17 .byte 98,243,93,40,68,203,0 .byte 98,243,93,40,68,211,1 .byte 98,243,93,40,68,227,16 vpxorq %ymm2,%ymm4,%ymm4 vpsrldq $8,%ymm4,%ymm2 vpslldq $8,%ymm4,%ymm4 vpxorq %ymm2,%ymm0,%ymm0 vpxorq %ymm1,%ymm4,%ymm4 vmovdqu64 POLY2(%rip),%ymm2 .byte 98,243,109,40,68,204,1 vpslldq $8,%ymm1,%ymm1 vpxorq %ymm1,%ymm4,%ymm4 .byte 98,243,109,40,68,204,0 vpsrldq $4,%ymm1,%ymm1 .byte 98,243,109,40,68,228,16 vpslldq $4,%ymm4,%ymm4 vpternlogq $0x96,%ymm1,%ymm0,%ymm4 vmovdqu64 %ymm4,192(%rdi) vinserti64x4 $1,%ymm5,%zmm4,%zmm4 vshufi64x2 $0x00,%zmm4,%zmm4,%zmm3 vmovdqa64 %zmm4,%zmm5 .byte 98,243,93,72,68,195,17 .byte 98,243,93,72,68,203,0 .byte 98,243,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm2,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm2 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,204,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm4,%zmm4 .byte 98,243,109,72,68,204,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm1,%zmm0,%zmm4 vmovdqu64 %zmm4,128(%rdi) vshufi64x2 $0x00,%zmm4,%zmm4,%zmm3 .byte 98,243,85,72,68,195,17 .byte 98,243,85,72,68,203,0 .byte 98,243,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm2,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm2 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,205,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm5,%zmm5 .byte 98,243,109,72,68,205,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm1,%zmm0,%zmm5 vmovdqu64 %zmm5,64(%rdi) .byte 98,243,93,72,68,195,17 .byte 98,243,93,72,68,203,0 .byte 98,243,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm2,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm2 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,204,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm4,%zmm4 .byte 98,243,109,72,68,204,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm1,%zmm0,%zmm4 vmovdqu64 %zmm4,0(%rdi) vzeroupper 
L$exit_init: .byte 0xf3,0xc3 .globl _gcm_gmult_avx512 .private_extern _gcm_gmult_avx512 .private_extern _gcm_gmult_avx512 .p2align 5 _gcm_gmult_avx512: .byte 243,15,30,250 vmovdqu64 (%rdi),%xmm1 vpshufb SHUF_MASK(%rip),%xmm1,%xmm1 vmovdqu64 240(%rsi),%xmm2 .byte 98,243,117,8,68,218,17 .byte 98,243,117,8,68,226,0 .byte 98,243,117,8,68,234,1 .byte 98,243,117,8,68,202,16 vpxorq %xmm5,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm5 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm5,%xmm3,%xmm3 vpxorq %xmm4,%xmm1,%xmm1 vmovdqu64 POLY2(%rip),%xmm5 .byte 98,243,85,8,68,225,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm1,%xmm1 .byte 98,243,85,8,68,225,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,243,85,8,68,201,16 vpslldq $4,%xmm1,%xmm1 vpternlogq $0x96,%xmm4,%xmm3,%xmm1 vpshufb SHUF_MASK(%rip),%xmm1,%xmm1 vmovdqu64 %xmm1,(%rdi) vzeroupper L$exit_gmult: .byte 0xf3,0xc3 .globl _gcm_ghash_avx512 .private_extern _gcm_ghash_avx512 .private_extern _gcm_ghash_avx512 .p2align 5 _gcm_ghash_avx512: L$ghash_seh_begin: .byte 243,15,30,250 pushq %rbx L$ghash_seh_push_rbx: pushq %rbp L$ghash_seh_push_rbp: pushq %r12 L$ghash_seh_push_r12: pushq %r13 L$ghash_seh_push_r13: pushq %r14 L$ghash_seh_push_r14: pushq %r15 L$ghash_seh_push_r15: leaq 0(%rsp),%rbp L$ghash_seh_setfp: L$ghash_seh_prolog_end: subq $820,%rsp andq $(-64),%rsp vmovdqu64 (%rdi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movq %rdx,%r10 movq %rcx,%r11 orq %r11,%r11 jz L$_CALC_AAD_done_hEgxyDlCngwrfFe xorq %rbx,%rbx vmovdqa64 SHUF_MASK(%rip),%zmm16 L$_get_AAD_loop48x16_hEgxyDlCngwrfFe: cmpq $768,%r11 jl L$_exit_AAD_loop48x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz L$_skip_hkeys_precomputation_amivrujEyduiFoi vmovdqu64 192(%rsi),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%rsi),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%rsi),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%rsi),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 
.byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,192(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,128(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,64(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,0(%rsp) L$_skip_hkeys_precomputation_amivrujEyduiFoi: movq $1,%rbx vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 
vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 512(%r10),%zmm11 vmovdqu64 576(%r10),%zmm3 vmovdqu64 640(%r10),%zmm4 vmovdqu64 704(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $768,%r11 je L$_CALC_AAD_done_hEgxyDlCngwrfFe addq $768,%r10 jmp L$_get_AAD_loop48x16_hEgxyDlCngwrfFe L$_exit_AAD_loop48x16_hEgxyDlCngwrfFe: cmpq $512,%r11 jl L$_less_than_32x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz L$_skip_hkeys_precomputation_wcpqaDvsGlbjGoe vmovdqu64 192(%rsi),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%rsi),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%rsi),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%rsi),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 
98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) L$_skip_hkeys_precomputation_wcpqaDvsGlbjGoe: movq $1,%rbx vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 
vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $512,%r11 je L$_CALC_AAD_done_hEgxyDlCngwrfFe addq $512,%r10 jmp L$_less_than_16x16_hEgxyDlCngwrfFe L$_less_than_32x16_hEgxyDlCngwrfFe: cmpq $256,%r11 jl L$_less_than_16x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsi),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsi),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsi),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsi),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $256,%r11 je L$_CALC_AAD_done_hEgxyDlCngwrfFe addq $256,%r10 L$_less_than_16x16_hEgxyDlCngwrfFe: leaq byte64_len_to_mask_table(%rip),%r12 leaq (%r12,%r11,8),%r12 addl $15,%r11d shrl $4,%r11d cmpl $2,%r11d jb L$_AAD_blocks_1_hEgxyDlCngwrfFe je L$_AAD_blocks_2_hEgxyDlCngwrfFe cmpl $4,%r11d jb L$_AAD_blocks_3_hEgxyDlCngwrfFe je L$_AAD_blocks_4_hEgxyDlCngwrfFe cmpl $6,%r11d jb L$_AAD_blocks_5_hEgxyDlCngwrfFe je L$_AAD_blocks_6_hEgxyDlCngwrfFe cmpl $8,%r11d jb L$_AAD_blocks_7_hEgxyDlCngwrfFe je L$_AAD_blocks_8_hEgxyDlCngwrfFe cmpl $10,%r11d jb L$_AAD_blocks_9_hEgxyDlCngwrfFe je L$_AAD_blocks_10_hEgxyDlCngwrfFe cmpl $12,%r11d jb L$_AAD_blocks_11_hEgxyDlCngwrfFe je 
L$_AAD_blocks_12_hEgxyDlCngwrfFe cmpl $14,%r11d jb L$_AAD_blocks_13_hEgxyDlCngwrfFe je L$_AAD_blocks_14_hEgxyDlCngwrfFe cmpl $15,%r11d je L$_AAD_blocks_15_hEgxyDlCngwrfFe L$_AAD_blocks_16_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 64(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 128(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm9,%zmm11,%zmm1 vpternlogq $0x96,%zmm10,%zmm3,%zmm6 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm12,%zmm11,%zmm7 vpternlogq $0x96,%zmm13,%zmm3,%zmm8 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,85,72,68,207,17 .byte 98,83,85,72,68,215,0 .byte 98,83,85,72,68,231,1 .byte 98,83,85,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_15_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 16(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 80(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,85,72,68,255,1 .byte 98,83,85,72,68,199,16 .byte 98,211,85,72,68,207,17 .byte 98,211,85,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 
vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_14_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%ymm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %ymm16,%ymm5,%ymm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 32(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 96(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,85,40,68,255,1 .byte 98,83,85,40,68,199,16 .byte 98,211,85,40,68,207,17 .byte 98,211,85,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_13_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%xmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %xmm16,%xmm5,%xmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 48(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 112(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,85,8,68,255,1 .byte 98,83,85,8,68,199,16 .byte 98,211,85,8,68,207,17 .byte 98,211,85,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_12_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 64(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 128(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_11_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 80(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,93,72,68,255,1 .byte 98,83,93,72,68,199,16 .byte 98,211,93,72,68,207,17 .byte 98,211,93,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_10_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%ymm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %ymm16,%ymm4,%ymm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 96(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 
98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,93,40,68,255,1 .byte 98,83,93,40,68,199,16 .byte 98,211,93,40,68,207,17 .byte 98,211,93,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_9_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%xmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %xmm16,%xmm4,%xmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 112(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,93,8,68,255,1 .byte 98,83,93,8,68,199,16 .byte 98,211,93,8,68,207,17 .byte 98,211,93,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_8_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 128(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 
98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_7_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,101,72,68,255,1 .byte 98,83,101,72,68,199,16 .byte 98,211,101,72,68,207,17 .byte 98,211,101,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_6_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%ymm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %ymm16,%ymm3,%ymm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,101,40,68,255,1 .byte 98,83,101,40,68,199,16 .byte 98,211,101,40,68,207,17 .byte 98,211,101,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_5_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%xmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %xmm16,%xmm3,%xmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,101,8,68,255,1 .byte 98,83,101,8,68,199,16 .byte 98,211,101,8,68,207,17 .byte 98,211,101,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 
$1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_4_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_3_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_2_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%ymm11{%k1}{z} vpshufb %ymm16,%ymm11,%ymm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,37,40,68,255,1 .byte 98,83,37,40,68,199,16 .byte 98,211,37,40,68,207,17 .byte 98,211,37,40,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp L$_CALC_AAD_done_hEgxyDlCngwrfFe L$_AAD_blocks_1_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%xmm11{%k1}{z} vpshufb %xmm16,%xmm11,%xmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,37,8,68,255,1 .byte 98,83,37,8,68,199,16 .byte 98,211,37,8,68,207,17 .byte 98,211,37,8,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 
$1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 L$_CALC_AAD_done_hEgxyDlCngwrfFe: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,(%rdi) cmpq $256,%rcx jbe L$skip_hkeys_cleanup_EmbgEptodyewbFa vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) L$skip_hkeys_cleanup_EmbgEptodyewbFa: vzeroupper leaq (%rbp),%rsp popq %r15 popq %r14 popq %r13 popq %r12 popq %rbp popq %rbx L$exit_ghash: .byte 0xf3,0xc3 L$ghash_seh_end: .globl _gcm_setiv_avx512 .private_extern _gcm_setiv_avx512 .private_extern _gcm_setiv_avx512 .p2align 5 _gcm_setiv_avx512: L$setiv_seh_begin: .byte 243,15,30,250 pushq %rbx L$setiv_seh_push_rbx: pushq %rbp L$setiv_seh_push_rbp: pushq %r12 L$setiv_seh_push_r12: pushq %r13 L$setiv_seh_push_r13: pushq %r14 L$setiv_seh_push_r14: pushq %r15 L$setiv_seh_push_r15: leaq 0(%rsp),%rbp L$setiv_seh_setfp: L$setiv_seh_prolog_end: subq $820,%rsp andq $(-64),%rsp cmpq $12,%rcx je iv_len_12_init_IV vpxor %xmm2,%xmm2,%xmm2 leaq 80(%rsi),%r13 movq %rdx,%r10 movq %rcx,%r11 orq %r11,%r11 jz L$_CALC_AAD_done_bnzFsuvmDknpsbp xorq %rbx,%rbx vmovdqa64 SHUF_MASK(%rip),%zmm16 L$_get_AAD_loop48x16_bnzFsuvmDknpsbp: cmpq $768,%r11 jl L$_exit_AAD_loop48x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz L$_skip_hkeys_precomputation_dBmbyqhifbmbobw vmovdqu64 192(%r13),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%r13),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%r13),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%r13),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq 
%zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,192(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,128(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,64(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,0(%rsp) L$_skip_hkeys_precomputation_dBmbyqhifbmbobw: movq $1,%rbx vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsp),%zmm19 .byte 
98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 512(%r10),%zmm11 vmovdqu64 576(%r10),%zmm3 vmovdqu64 640(%r10),%zmm4 vmovdqu64 704(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $768,%r11 je L$_CALC_AAD_done_bnzFsuvmDknpsbp addq $768,%r10 jmp L$_get_AAD_loop48x16_bnzFsuvmDknpsbp L$_exit_AAD_loop48x16_bnzFsuvmDknpsbp: cmpq $512,%r11 jl L$_less_than_32x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz L$_skip_hkeys_precomputation_javBbvtBBkicjdB 
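# First time through this AAD path (%rbx still zero): extend the cached table
# of GHASH key powers on the stack. Each step below is a carry-less multiply
# (the .byte sequences are manually encoded EVEX VPCLMULQDQ instructions)
# followed by a reduction against the POLY2 constant, with the results spilled
# to the stack for the wide GHASH loops; %rbx is then set to 1 so later
# iterations skip this precomputation.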
vmovdqu64 192(%r13),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%r13),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%r13),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%r13),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) L$_skip_hkeys_precomputation_javBbvtBBkicjdB: movq $1,%rbx vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 
98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $512,%r11 je L$_CALC_AAD_done_bnzFsuvmDknpsbp addq $512,%r10 jmp L$_less_than_16x16_bnzFsuvmDknpsbp L$_less_than_32x16_bnzFsuvmDknpsbp: cmpq $256,%r11 jl L$_less_than_16x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%r13),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%r13),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%r13),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%r13),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $256,%r11 je L$_CALC_AAD_done_bnzFsuvmDknpsbp addq $256,%r10 L$_less_than_16x16_bnzFsuvmDknpsbp: leaq byte64_len_to_mask_table(%rip),%r12 leaq (%r12,%r11,8),%r12 addl $15,%r11d shrl $4,%r11d cmpl $2,%r11d jb L$_AAD_blocks_1_bnzFsuvmDknpsbp je L$_AAD_blocks_2_bnzFsuvmDknpsbp cmpl $4,%r11d jb L$_AAD_blocks_3_bnzFsuvmDknpsbp je L$_AAD_blocks_4_bnzFsuvmDknpsbp cmpl $6,%r11d jb L$_AAD_blocks_5_bnzFsuvmDknpsbp 
je L$_AAD_blocks_6_bnzFsuvmDknpsbp cmpl $8,%r11d jb L$_AAD_blocks_7_bnzFsuvmDknpsbp je L$_AAD_blocks_8_bnzFsuvmDknpsbp cmpl $10,%r11d jb L$_AAD_blocks_9_bnzFsuvmDknpsbp je L$_AAD_blocks_10_bnzFsuvmDknpsbp cmpl $12,%r11d jb L$_AAD_blocks_11_bnzFsuvmDknpsbp je L$_AAD_blocks_12_bnzFsuvmDknpsbp cmpl $14,%r11d jb L$_AAD_blocks_13_bnzFsuvmDknpsbp je L$_AAD_blocks_14_bnzFsuvmDknpsbp cmpl $15,%r11d je L$_AAD_blocks_15_bnzFsuvmDknpsbp L$_AAD_blocks_16_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 64(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 128(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm9,%zmm11,%zmm1 vpternlogq $0x96,%zmm10,%zmm3,%zmm6 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm12,%zmm11,%zmm7 vpternlogq $0x96,%zmm13,%zmm3,%zmm8 vmovdqu64 192(%r13),%zmm15 .byte 98,83,85,72,68,207,17 .byte 98,83,85,72,68,215,0 .byte 98,83,85,72,68,231,1 .byte 98,83,85,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_15_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 16(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 80(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 144(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,85,72,68,255,1 .byte 98,83,85,72,68,199,16 .byte 98,211,85,72,68,207,17 .byte 98,211,85,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq 
%xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_14_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%ymm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %ymm16,%ymm5,%ymm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 32(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 96(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 160(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 224(%r13),%ymm15 .byte 98,211,85,40,68,255,1 .byte 98,83,85,40,68,199,16 .byte 98,211,85,40,68,207,17 .byte 98,211,85,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_13_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%xmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %xmm16,%xmm5,%xmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 48(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 112(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 176(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 240(%r13),%xmm15 .byte 98,211,85,8,68,255,1 .byte 98,83,85,8,68,199,16 .byte 98,211,85,8,68,207,17 .byte 98,211,85,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 
vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_12_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 64(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 128(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 192(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_11_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 80(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 144(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,93,72,68,255,1 .byte 98,83,93,72,68,199,16 .byte 98,211,93,72,68,207,17 .byte 98,211,93,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_10_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%ymm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb 
%ymm16,%ymm4,%ymm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 96(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 160(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 224(%r13),%ymm15 .byte 98,211,93,40,68,255,1 .byte 98,83,93,40,68,199,16 .byte 98,211,93,40,68,207,17 .byte 98,211,93,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_9_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%xmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %xmm16,%xmm4,%xmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 112(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 176(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 240(%r13),%xmm15 .byte 98,211,93,8,68,255,1 .byte 98,83,93,8,68,199,16 .byte 98,211,93,8,68,207,17 .byte 98,211,93,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_8_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 128(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 192(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 
vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_7_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 144(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,101,72,68,255,1 .byte 98,83,101,72,68,199,16 .byte 98,211,101,72,68,207,17 .byte 98,211,101,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_6_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%ymm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %ymm16,%ymm3,%ymm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 160(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 224(%r13),%ymm15 .byte 98,211,101,40,68,255,1 .byte 98,83,101,40,68,199,16 .byte 98,211,101,40,68,207,17 .byte 98,211,101,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_5_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%xmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %xmm16,%xmm3,%xmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 176(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 240(%r13),%xmm15 .byte 98,211,101,8,68,255,1 .byte 98,83,101,8,68,199,16 .byte 98,211,101,8,68,207,17 .byte 98,211,101,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq 
$8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_4_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 192(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_3_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_2_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%ymm11{%k1}{z} vpshufb %ymm16,%ymm11,%ymm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 224(%r13),%ymm15 .byte 98,211,37,40,68,255,1 .byte 98,83,37,40,68,199,16 .byte 98,211,37,40,68,207,17 .byte 98,211,37,40,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp L$_CALC_AAD_done_bnzFsuvmDknpsbp L$_AAD_blocks_1_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%xmm11{%k1}{z} vpshufb %xmm16,%xmm11,%xmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 240(%r13),%xmm15 .byte 98,211,37,8,68,255,1 .byte 98,83,37,8,68,199,16 
.byte 98,211,37,8,68,207,17 .byte 98,211,37,8,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 L$_CALC_AAD_done_bnzFsuvmDknpsbp: movq %rcx,%r10 shlq $3,%r10 vmovq %r10,%xmm3 vpxorq %xmm2,%xmm3,%xmm2 vmovdqu64 240(%r13),%xmm1 .byte 98,115,109,8,68,217,17 .byte 98,243,109,8,68,217,0 .byte 98,243,109,8,68,225,1 .byte 98,243,109,8,68,209,16 vpxorq %xmm4,%xmm2,%xmm2 vpsrldq $8,%xmm2,%xmm4 vpslldq $8,%xmm2,%xmm2 vpxorq %xmm4,%xmm11,%xmm11 vpxorq %xmm3,%xmm2,%xmm2 vmovdqu64 POLY2(%rip),%xmm4 .byte 98,243,93,8,68,218,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm2,%xmm2 .byte 98,243,93,8,68,218,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,243,93,8,68,210,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm3,%xmm11,%xmm2 vpshufb SHUF_MASK(%rip),%xmm2,%xmm2 jmp skip_iv_len_12_init_IV iv_len_12_init_IV: vmovdqu8 ONEf(%rip),%xmm2 movq %rdx,%r11 movl $0x0000000000000fff,%r10d kmovq %r10,%k1 vmovdqu8 (%r11),%xmm2{%k1} skip_iv_len_12_init_IV: vmovdqu %xmm2,%xmm1 movl 240(%rdi),%r10d cmpl $9,%r10d je L$aes_128_otBvnbdyuroewzD cmpl $11,%r10d je L$aes_192_otBvnbdyuroewzD cmpl $13,%r10d je L$aes_256_otBvnbdyuroewzD jmp L$exit_aes_otBvnbdyuroewzD .p2align 5 L$aes_128_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 98,242,117,8,221,79,10 jmp L$exit_aes_otBvnbdyuroewzD .p2align 5 L$aes_192_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 98,242,117,8,220,79,10 .byte 98,242,117,8,220,79,11 .byte 98,242,117,8,221,79,12 jmp L$exit_aes_otBvnbdyuroewzD .p2align 5 L$aes_256_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 98,242,117,8,220,79,10 .byte 98,242,117,8,220,79,11 .byte 98,242,117,8,220,79,12 .byte 98,242,117,8,220,79,13 .byte 98,242,117,8,221,79,14 jmp L$exit_aes_otBvnbdyuroewzD L$exit_aes_otBvnbdyuroewzD: vmovdqu %xmm1,32(%rsi) vpshufb SHUF_MASK(%rip),%xmm2,%xmm2 vmovdqu %xmm2,0(%rsi) L$exit_setiv: cmpq $256,%rcx jbe L$skip_hkeys_cleanup_lDGzdqCkvgheosr vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) L$skip_hkeys_cleanup_lDGzdqCkvgheosr: vzeroupper leaq (%rbp),%rsp 
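# _gcm_setiv_avx512 epilogue: the expanded hash-key material on the stack has
# been cleared above; restore callee-saved registers and return.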
popq %r15 popq %r14 popq %r13 popq %r12 popq %rbp popq %rbx .byte 0xf3,0xc3 L$setiv_seh_end: .globl _aes_gcm_encrypt_avx512 .private_extern _aes_gcm_encrypt_avx512 .private_extern _aes_gcm_encrypt_avx512 .p2align 5 _aes_gcm_encrypt_avx512: L$encrypt_seh_begin: #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+7(%rip) #endif .byte 243,15,30,250 pushq %rbx L$encrypt_seh_push_rbx: pushq %rbp L$encrypt_seh_push_rbp: pushq %r12 L$encrypt_seh_push_r12: pushq %r13 L$encrypt_seh_push_r13: pushq %r14 L$encrypt_seh_push_r14: pushq %r15 L$encrypt_seh_push_r15: leaq 0(%rsp),%rbp L$encrypt_seh_setfp: L$encrypt_seh_prolog_end: subq $1588,%rsp andq $(-64),%rsp movl 240(%rdi),%eax cmpl $9,%eax je L$aes_gcm_encrypt_128_avx512 cmpl $11,%eax je L$aes_gcm_encrypt_192_avx512 cmpl $13,%eax je L$aes_gcm_encrypt_256_avx512 xorl %eax,%eax jmp L$exit_gcm_encrypt .p2align 5 L$aes_gcm_encrypt_128_avx512: orq %r8,%r8 je L$_enc_dec_abort_pzwgkGgbplFqzaB xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_FkezCgctzlCoEyh movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_FkezCgctzlCoEyh subq %r13,%r12 L$_no_extra_mask_FkezCgctzlCoEyh: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_FkezCgctzlCoEyh .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_FkezCgctzlCoEyh L$_partial_incomplete_FkezCgctzlCoEyh: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_FkezCgctzlCoEyh: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_FkezCgctzlCoEyh: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_pzwgkGgbplFqzaB cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_pzwgkGgbplFqzaB vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_yByFrylbFDFnFCp vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_yByFrylbFDFnFCp L$_next_16_overflow_yByFrylbFDFnFCp: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_yByFrylbFDFnFCp: vshufi64x2 
$255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_achfkmnqFwjgbDD vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_achfkmnqFwjgbDD: cmpq $512,%r8 jb L$_message_below_32_blocks_pzwgkGgbplFqzaB cmpb $240,%r15b jae L$_next_16_overflow_xvcFynjeulFjDdF vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_xvcFynjeulFjDdF L$_next_16_overflow_xvcFynjeulFjDdF: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_xvcFynjeulFjDdF: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 
98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_mBcrmCyGfEttetw vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 
98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_mBcrmCyGfEttetw: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_pzwgkGgbplFqzaB L$_encrypt_big_nblocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae L$_16_blocks_overflow_avoAfAGuxmumDjA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_avoAfAGuxmumDjA L$_16_blocks_overflow_avoAfAGuxmumDjA: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_avoAfAGuxmumDjA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq 
%zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_AGgjmjawDklDqyq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_AGgjmjawDklDqyq L$_16_blocks_overflow_AGgjmjawDklDqyq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_AGgjmjawDklDqyq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_gutvpupplrsoEbw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_gutvpupplrsoEbw L$_16_blocks_overflow_gutvpupplrsoEbw: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_gutvpupplrsoEbw: 
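# Third 16-block group of the main AES-128 encrypt loop: AES-CTR rounds for
# the next 16 counter blocks (VAESENC with round keys broadcast from %rdi)
# are interleaved with GHASH multiplies of the previously buffered ciphertext
# against the cached key powers, and the accumulated products are then
# reduced with POLY2 into the single 128-bit GHASH state kept in %zmm14.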
vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_pzwgkGgbplFqzaB L$_no_more_big_nblocks_pzwgkGgbplFqzaB: cmpq $512,%r8 jae L$_encrypt_32_blocks_pzwgkGgbplFqzaB cmpq $256,%r8 jae L$_encrypt_16_blocks_pzwgkGgbplFqzaB L$_encrypt_0_blocks_ghash_32_pzwgkGgbplFqzaB: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_BdcphecxdpdFEsb cmpl $8,%r10d je L$_last_num_blocks_is_8_BdcphecxdpdFEsb jb L$_last_num_blocks_is_7_1_BdcphecxdpdFEsb cmpl $12,%r10d je L$_last_num_blocks_is_12_BdcphecxdpdFEsb jb L$_last_num_blocks_is_11_9_BdcphecxdpdFEsb cmpl $15,%r10d je L$_last_num_blocks_is_15_BdcphecxdpdFEsb ja L$_last_num_blocks_is_16_BdcphecxdpdFEsb cmpl $14,%r10d je L$_last_num_blocks_is_14_BdcphecxdpdFEsb jmp L$_last_num_blocks_is_13_BdcphecxdpdFEsb L$_last_num_blocks_is_11_9_BdcphecxdpdFEsb: cmpl $10,%r10d je L$_last_num_blocks_is_10_BdcphecxdpdFEsb ja L$_last_num_blocks_is_11_BdcphecxdpdFEsb jmp L$_last_num_blocks_is_9_BdcphecxdpdFEsb L$_last_num_blocks_is_7_1_BdcphecxdpdFEsb: cmpl $4,%r10d je L$_last_num_blocks_is_4_BdcphecxdpdFEsb jb L$_last_num_blocks_is_3_1_BdcphecxdpdFEsb cmpl $6,%r10d ja L$_last_num_blocks_is_7_BdcphecxdpdFEsb je L$_last_num_blocks_is_6_BdcphecxdpdFEsb jmp L$_last_num_blocks_is_5_BdcphecxdpdFEsb L$_last_num_blocks_is_3_1_BdcphecxdpdFEsb: cmpl $2,%r10d ja L$_last_num_blocks_is_3_BdcphecxdpdFEsb je L$_last_num_blocks_is_2_BdcphecxdpdFEsb L$_last_num_blocks_is_1_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_azzgqhumkfnyDqm vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_azzgqhumkfnyDqm L$_16_blocks_overflow_azzgqhumkfnyDqm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_azzgqhumkfnyDqm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 
vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_dnmqhGDjDpgnine subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dnmqhGDjDpgnine L$_small_initial_partial_block_dnmqhGDjDpgnine: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_dnmqhGDjDpgnine L$_small_initial_compute_done_dnmqhGDjDpgnine: L$_after_reduction_dnmqhGDjDpgnine: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_2_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_yekhBCebufcAiFh vpaddd %ymm28,%ymm2,%ymm0 jmp 
L$_16_blocks_ok_yekhBCebufcAiFh L$_16_blocks_overflow_yekhBCebufcAiFh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_yekhBCebufcAiFh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jwyvkjdvesmxGpv subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jwyvkjdvesmxGpv L$_small_initial_partial_block_jwyvkjdvesmxGpv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jwyvkjdvesmxGpv: orq %r8,%r8 je L$_after_reduction_jwyvkjdvesmxGpv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jwyvkjdvesmxGpv: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_3_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_usjywjwllaabozc vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_usjywjwllaabozc L$_16_blocks_overflow_usjywjwllaabozc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_usjywjwllaabozc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lmkDAitgFzCCoEA subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lmkDAitgFzCCoEA L$_small_initial_partial_block_lmkDAitgFzCCoEA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lmkDAitgFzCCoEA: orq %r8,%r8 je L$_after_reduction_lmkDAitgFzCCoEA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lmkDAitgFzCCoEA: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_4_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_xobkzaAwcplaFgb vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_xobkzaAwcplaFgb L$_16_blocks_overflow_xobkzaAwcplaFgb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_xobkzaAwcplaFgb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 
vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_genGClghdbzBqhw subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_genGClghdbzBqhw L$_small_initial_partial_block_genGClghdbzBqhw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_genGClghdbzBqhw: orq %r8,%r8 je L$_after_reduction_genGClghdbzBqhw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_genGClghdbzBqhw: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_5_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_bpsqdGAhjeggABn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_bpsqdGAhjeggABn L$_16_blocks_overflow_bpsqdGAhjeggABn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_bpsqdGAhjeggABn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 
98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wdqrtGpojajFBea subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wdqrtGpojajFBea L$_small_initial_partial_block_wdqrtGpojajFBea: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 
vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wdqrtGpojajFBea: orq %r8,%r8 je L$_after_reduction_wdqrtGpojajFBea vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wdqrtGpojajFBea: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_6_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_qmgDCpkysmqcgnB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_qmgDCpkysmqcgnB L$_16_blocks_overflow_qmgDCpkysmqcgnB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_qmgDCpkysmqcgnB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb 
%ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GvjnkpjsgDafsun subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GvjnkpjsgDafsun L$_small_initial_partial_block_GvjnkpjsgDafsun: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GvjnkpjsgDafsun: orq %r8,%r8 je L$_after_reduction_GvjnkpjsgDafsun vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GvjnkpjsgDafsun: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_7_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_jaFyvjvpAfzmwyg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_jaFyvjvpAfzmwyg L$_16_blocks_overflow_jaFyvjvpAfzmwyg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_jaFyvjvpAfzmwyg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 
128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iuyAGoBcDewEeiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iuyAGoBcDewEeiy L$_small_initial_partial_block_iuyAGoBcDewEeiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq 
%zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iuyAGoBcDewEeiy: orq %r8,%r8 je L$_after_reduction_iuyAGoBcDewEeiy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iuyAGoBcDewEeiy: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_8_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_FbwsrgpDGDmccid vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_FbwsrgpDGDmccid L$_16_blocks_overflow_FbwsrgpDGDmccid: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_FbwsrgpDGDmccid: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lABtdkpvoGeFpzp subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lABtdkpvoGeFpzp L$_small_initial_partial_block_lABtdkpvoGeFpzp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lABtdkpvoGeFpzp: orq %r8,%r8 je L$_after_reduction_lABtdkpvoGeFpzp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lABtdkpvoGeFpzp: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_9_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_dtxuExFwmpsGEiG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_dtxuExFwmpsGEiG L$_16_blocks_overflow_dtxuExFwmpsGEiG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_dtxuExFwmpsGEiG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 
0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vkADoeFsfDwilnv subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 
vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vkADoeFsfDwilnv L$_small_initial_partial_block_vkADoeFsfDwilnv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vkADoeFsfDwilnv: orq %r8,%r8 je L$_after_reduction_vkADoeFsfDwilnv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vkADoeFsfDwilnv: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_10_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_damgrhyFxffganz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_damgrhyFxffganz L$_16_blocks_overflow_damgrhyFxffganz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_damgrhyFxffganz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 
.byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iankhgrgFnoiAgG subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iankhgrgFnoiAgG L$_small_initial_partial_block_iankhgrgFnoiAgG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 
98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iankhgrgFnoiAgG: orq %r8,%r8 je L$_after_reduction_iankhgrgFnoiAgG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iankhgrgFnoiAgG: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_11_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_gnGEkpgDpmugvpk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_gnGEkpgDpmugvpk L$_16_blocks_overflow_gnGEkpgDpmugvpk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_gnGEkpgDpmugvpk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sECkucceDhaBnCk subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sECkucceDhaBnCk L$_small_initial_partial_block_sECkucceDhaBnCk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sECkucceDhaBnCk: orq %r8,%r8 je L$_after_reduction_sECkucceDhaBnCk vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sECkucceDhaBnCk: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_12_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_qkecuzhoaAuxmmC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_qkecuzhoaAuxmmC L$_16_blocks_overflow_qkecuzhoaAuxmmC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_qkecuzhoaAuxmmC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GEFnxzpzjbtbhxx subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GEFnxzpzjbtbhxx L$_small_initial_partial_block_GEFnxzpzjbtbhxx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GEFnxzpzjbtbhxx: orq %r8,%r8 je L$_after_reduction_GEFnxzpzjbtbhxx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GEFnxzpzjbtbhxx: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_13_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq 
%r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_BjhkFcriuCnuFez vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_BjhkFcriuCnuFez L$_16_blocks_overflow_BjhkFcriuCnuFez: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_BjhkFcriuCnuFez: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) 
vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jyxtluvpAmFhjFk subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jyxtluvpAmFhjFk L$_small_initial_partial_block_jyxtluvpAmFhjFk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jyxtluvpAmFhjFk: orq %r8,%r8 je L$_after_reduction_jyxtluvpAmFhjFk vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jyxtluvpAmFhjFk: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_14_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d 
jae L$_16_blocks_overflow_kGBwgppdvolmGmc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_kGBwgppdvolmGmc L$_16_blocks_overflow_kGBwgppdvolmGmc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_kGBwgppdvolmGmc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 
%ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AxanimCshomfwbg subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AxanimCshomfwbg L$_small_initial_partial_block_AxanimCshomfwbg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AxanimCshomfwbg: orq %r8,%r8 je L$_after_reduction_AxanimCshomfwbg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AxanimCshomfwbg: jmp L$_last_blocks_done_BdcphecxdpdFEsb 
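# Descriptive comment on the tail dispatch (hedged reading of the generated code):
# each L$_last_num_blocks_is_N_* arm below handles a residue of N 16-byte blocks
# with the same pattern -- derive N counter blocks from %zmm2 (taking the matching
# *_overflow_* path when the low counter byte tracked in %r15 would wrap), run the
# AES-128 rounds using the 11 round keys at 0..160(%rdi) (the zmm forms of
# vaesenc/vaesenclast and vpclmulqdq appear as raw .byte sequences, presumably for
# assembler compatibility), XOR the keystream with the masked input loaded from
# (%rcx,%rax,1), store the output, and fold the byte-swapped (vpshufb %zmm29)
# output into the running GHASH accumulator %xmm14.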
L$_last_num_blocks_is_15_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_EBkkfjcEDyEptfo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_EBkkfjcEDyEptfo L$_16_blocks_overflow_EBkkfjcEDyEptfo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_EBkkfjcEDyEptfo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 
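# 15-block tail: %zmm0/%zmm3/%zmm4/%zmm5 now hold the encrypted output. The code
# below stores it (the final 64-byte chunk through mask %k1), keeps the last
# output block in %xmm11, and byte-swaps the output with %zmm29 for the GHASH
# update that follows.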
vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_henbgxejEhFgymC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_henbgxejEhFgymC L$_small_initial_partial_block_henbgxejEhFgymC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
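# GHASH over the full blocks has been reduced (via POLY2) into %xmm14; when %r8
# is non-zero a partial final block remains, and its byte-swapped output in
# %xmm7 is XORed into the accumulator just below.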
L$_small_initial_compute_done_henbgxejEhFgymC: orq %r8,%r8 je L$_after_reduction_henbgxejEhFgymC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_henbgxejEhFgymC: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_16_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_BlcvjlyDGzsAttk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_BlcvjlyDGzsAttk L$_16_blocks_overflow_BlcvjlyDGzsAttk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_BlcvjlyDGzsAttk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 
98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_aGAffhBljtiFsea: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aGAffhBljtiFsea: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aGAffhBljtiFsea: jmp L$_last_blocks_done_BdcphecxdpdFEsb L$_last_num_blocks_is_0_BdcphecxdpdFEsb: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq 
%xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_BdcphecxdpdFEsb: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_pzwgkGgbplFqzaB L$_encrypt_32_blocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae L$_16_blocks_overflow_zuczDhwqwDAmzjf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_zuczDhwqwDAmzjf L$_16_blocks_overflow_zuczDhwqwDAmzjf: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_zuczDhwqwDAmzjf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_CeGBtrGsogoqpyb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_CeGBtrGsogoqpyb L$_16_blocks_overflow_CeGBtrGsogoqpyb: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_CeGBtrGsogoqpyb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CfrpfvcjvvrcbGa cmpl $8,%r10d je L$_last_num_blocks_is_8_CfrpfvcjvvrcbGa jb L$_last_num_blocks_is_7_1_CfrpfvcjvvrcbGa cmpl $12,%r10d je L$_last_num_blocks_is_12_CfrpfvcjvvrcbGa jb L$_last_num_blocks_is_11_9_CfrpfvcjvvrcbGa cmpl $15,%r10d je L$_last_num_blocks_is_15_CfrpfvcjvvrcbGa ja L$_last_num_blocks_is_16_CfrpfvcjvvrcbGa cmpl $14,%r10d je L$_last_num_blocks_is_14_CfrpfvcjvvrcbGa jmp L$_last_num_blocks_is_13_CfrpfvcjvvrcbGa L$_last_num_blocks_is_11_9_CfrpfvcjvvrcbGa: cmpl $10,%r10d je L$_last_num_blocks_is_10_CfrpfvcjvvrcbGa ja L$_last_num_blocks_is_11_CfrpfvcjvvrcbGa jmp L$_last_num_blocks_is_9_CfrpfvcjvvrcbGa L$_last_num_blocks_is_7_1_CfrpfvcjvvrcbGa: cmpl $4,%r10d je L$_last_num_blocks_is_4_CfrpfvcjvvrcbGa jb L$_last_num_blocks_is_3_1_CfrpfvcjvvrcbGa cmpl $6,%r10d ja L$_last_num_blocks_is_7_CfrpfvcjvvrcbGa je L$_last_num_blocks_is_6_CfrpfvcjvvrcbGa jmp L$_last_num_blocks_is_5_CfrpfvcjvvrcbGa L$_last_num_blocks_is_3_1_CfrpfvcjvvrcbGa: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CfrpfvcjvvrcbGa je 
L$_last_num_blocks_is_2_CfrpfvcjvvrcbGa L$_last_num_blocks_is_1_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_dbajrbEcjsFpceD vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_dbajrbEcjsFpceD L$_16_blocks_overflow_dbajrbEcjsFpceD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_dbajrbEcjsFpceD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_qFrfFusofbDaigi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qFrfFusofbDaigi L$_small_initial_partial_block_qFrfFusofbDaigi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq 
%ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_qFrfFusofbDaigi L$_small_initial_compute_done_qFrfFusofbDaigi: L$_after_reduction_qFrfFusofbDaigi: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_2_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_kgpAeeaoAnozgEF vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_kgpAeeaoAnozgEF L$_16_blocks_overflow_kgpAeeaoAnozgEF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_kgpAeeaoAnozgEF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zaugFxnkqnldtoD subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zaugFxnkqnldtoD L$_small_initial_partial_block_zaugFxnkqnldtoD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zaugFxnkqnldtoD: orq %r8,%r8 je L$_after_reduction_zaugFxnkqnldtoD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zaugFxnkqnldtoD: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_3_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_kblsDeoCDCisntD vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_kblsDeoCDCisntD L$_16_blocks_overflow_kblsDeoCDCisntD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_kblsDeoCDCisntD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 
160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_Ajrbbfyxhsbqszm subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Ajrbbfyxhsbqszm L$_small_initial_partial_block_Ajrbbfyxhsbqszm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Ajrbbfyxhsbqszm: orq %r8,%r8 je L$_after_reduction_Ajrbbfyxhsbqszm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Ajrbbfyxhsbqszm: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_4_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_eGcBplCnDqdtGiy vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_eGcBplCnDqdtGiy L$_16_blocks_overflow_eGcBplCnDqdtGiy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_eGcBplCnDqdtGiy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 
vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xeEmmeAmgryyzGr subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xeEmmeAmgryyzGr L$_small_initial_partial_block_xeEmmeAmgryyzGr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xeEmmeAmgryyzGr: orq %r8,%r8 je L$_after_reduction_xeEmmeAmgryyzGr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xeEmmeAmgryyzGr: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_5_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_bgsqDFmekFAimag vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_bgsqDFmekFAimag L$_16_blocks_overflow_bgsqDFmekFAimag: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_bgsqDFmekFAimag: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iwszuhryhslDkgD subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iwszuhryhslDkgD L$_small_initial_partial_block_iwszuhryhslDkgD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iwszuhryhslDkgD: orq %r8,%r8 je L$_after_reduction_iwszuhryhslDkgD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iwszuhryhslDkgD: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_6_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_oaGuttEwoetbnjp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_oaGuttEwoetbnjp L$_16_blocks_overflow_oaGuttEwoetbnjp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_oaGuttEwoetbnjp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 
vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pulckbvkcxsatqu subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pulckbvkcxsatqu L$_small_initial_partial_block_pulckbvkcxsatqu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pulckbvkcxsatqu: orq %r8,%r8 je L$_after_reduction_pulckbvkcxsatqu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pulckbvkcxsatqu: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_7_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_FvhiAqmdFpdFmlp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_FvhiAqmdFpdFmlp L$_16_blocks_overflow_FvhiAqmdFpdFmlp: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_FvhiAqmdFpdFmlp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sqobqxAEFkeiGsu subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sqobqxAEFkeiGsu L$_small_initial_partial_block_sqobqxAEFkeiGsu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sqobqxAEFkeiGsu: orq %r8,%r8 je L$_after_reduction_sqobqxAEFkeiGsu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sqobqxAEFkeiGsu: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_8_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_hwGtCmqmcvackpz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_hwGtCmqmcvackpz L$_16_blocks_overflow_hwGtCmqmcvackpz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_hwGtCmqmcvackpz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_itiiyBtdfcskbai subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_itiiyBtdfcskbai L$_small_initial_partial_block_itiiyBtdfcskbai: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_itiiyBtdfcskbai: orq %r8,%r8 je L$_after_reduction_itiiyBtdfcskbai vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_itiiyBtdfcskbai: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_9_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq 
$128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_DDnhmxjezrilein vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_DDnhmxjezrilein L$_16_blocks_overflow_DDnhmxjezrilein: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_DDnhmxjezrilein: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bBBEnlialjlpfsp subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 
176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bBBEnlialjlpfsp L$_small_initial_partial_block_bBBEnlialjlpfsp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bBBEnlialjlpfsp: orq %r8,%r8 je L$_after_reduction_bBBEnlialjlpfsp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bBBEnlialjlpfsp: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_10_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_zCijhbGCeraapou vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_zCijhbGCeraapou L$_16_blocks_overflow_zCijhbGCeraapou: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_zCijhbGCeraapou: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DzFChhwqqhyhjhC subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DzFChhwqqhyhjhC L$_small_initial_partial_block_DzFChhwqqhyhjhC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DzFChhwqqhyhjhC: orq %r8,%r8 je L$_after_reduction_DzFChhwqqhyhjhC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DzFChhwqqhyhjhC: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_11_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_aafwvnrniBpBhGh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_aafwvnrniBpBhGh L$_16_blocks_overflow_aafwvnrniBpBhGh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_aafwvnrniBpBhGh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 
vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ntDAaiasAzzqzla subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ntDAaiasAzzqzla L$_small_initial_partial_block_ntDAaiasAzzqzla: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq 
%zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ntDAaiasAzzqzla: orq %r8,%r8 je L$_after_reduction_ntDAaiasAzzqzla vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ntDAaiasAzzqzla: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_12_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_szlfmGmeuofoAra vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_szlfmGmeuofoAra L$_16_blocks_overflow_szlfmGmeuofoAra: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_szlfmGmeuofoAra: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 
vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FgCEuitmambDkxu subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FgCEuitmambDkxu L$_small_initial_partial_block_FgCEuitmambDkxu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FgCEuitmambDkxu: orq %r8,%r8 je L$_after_reduction_FgCEuitmambDkxu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FgCEuitmambDkxu: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_13_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_knBrwwsfezoBuDz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_knBrwwsfezoBuDz L$_16_blocks_overflow_knBrwwsfezoBuDz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_knBrwwsfezoBuDz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 
160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_szDFenAfBoEDgjz subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_szDFenAfBoEDgjz L$_small_initial_partial_block_szDFenAfBoEDgjz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_szDFenAfBoEDgjz: orq %r8,%r8 je L$_after_reduction_szDFenAfBoEDgjz vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_szDFenAfBoEDgjz: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_14_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_xfkAqxxGjDnhBjB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_xfkAqxxGjDnhBjB L$_16_blocks_overflow_xfkAqxxGjDnhBjB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_xfkAqxxGjDnhBjB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 
.byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xpnwxzswluGFliu subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xpnwxzswluGFliu L$_small_initial_partial_block_xpnwxzswluGFliu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 
.byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xpnwxzswluGFliu: orq %r8,%r8 je L$_after_reduction_xpnwxzswluGFliu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xpnwxzswluGFliu: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_15_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_myvDpkrqCoAukhb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_myvDpkrqCoAukhb L$_16_blocks_overflow_myvDpkrqCoAukhb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_myvDpkrqCoAukhb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jjDbyaqFmGmaiEB subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jjDbyaqFmGmaiEB L$_small_initial_partial_block_jjDbyaqFmGmaiEB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jjDbyaqFmGmaiEB: orq %r8,%r8 je L$_after_reduction_jjDbyaqFmGmaiEB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jjDbyaqFmGmaiEB: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_16_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_zEAEoetgkvqojFa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_zEAEoetgkvqojFa L$_16_blocks_overflow_zEAEoetgkvqojFa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_zEAEoetgkvqojFa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_erAoEayjDqpuhEu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_erAoEayjDqpuhEu: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_erAoEayjDqpuhEu: jmp L$_last_blocks_done_CfrpfvcjvvrcbGa L$_last_num_blocks_is_0_CfrpfvcjvvrcbGa: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq 
$0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CfrpfvcjvvrcbGa: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_pzwgkGgbplFqzaB L$_encrypt_16_blocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae L$_16_blocks_overflow_rkcxrDqAhslhkiA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_rkcxrDqAhslhkiA L$_16_blocks_overflow_rkcxrDqAhslhkiA: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_rkcxrDqAhslhkiA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_aibBfiDGEtrGszv cmpl $8,%r10d je L$_last_num_blocks_is_8_aibBfiDGEtrGszv jb L$_last_num_blocks_is_7_1_aibBfiDGEtrGszv cmpl $12,%r10d je L$_last_num_blocks_is_12_aibBfiDGEtrGszv jb L$_last_num_blocks_is_11_9_aibBfiDGEtrGszv cmpl $15,%r10d je L$_last_num_blocks_is_15_aibBfiDGEtrGszv ja L$_last_num_blocks_is_16_aibBfiDGEtrGszv cmpl $14,%r10d je L$_last_num_blocks_is_14_aibBfiDGEtrGszv jmp L$_last_num_blocks_is_13_aibBfiDGEtrGszv L$_last_num_blocks_is_11_9_aibBfiDGEtrGszv: cmpl $10,%r10d je L$_last_num_blocks_is_10_aibBfiDGEtrGszv ja L$_last_num_blocks_is_11_aibBfiDGEtrGszv jmp L$_last_num_blocks_is_9_aibBfiDGEtrGszv L$_last_num_blocks_is_7_1_aibBfiDGEtrGszv: cmpl $4,%r10d je L$_last_num_blocks_is_4_aibBfiDGEtrGszv jb L$_last_num_blocks_is_3_1_aibBfiDGEtrGszv cmpl $6,%r10d ja L$_last_num_blocks_is_7_aibBfiDGEtrGszv je L$_last_num_blocks_is_6_aibBfiDGEtrGszv jmp L$_last_num_blocks_is_5_aibBfiDGEtrGszv L$_last_num_blocks_is_3_1_aibBfiDGEtrGszv: cmpl $2,%r10d ja L$_last_num_blocks_is_3_aibBfiDGEtrGszv je L$_last_num_blocks_is_2_aibBfiDGEtrGszv L$_last_num_blocks_is_1_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_CfAjeyGwbnghnsF vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_CfAjeyGwbnghnsF L$_16_blocks_overflow_CfAjeyGwbnghnsF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_CfAjeyGwbnghnsF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 
1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_EpCxqyApoFBApzn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EpCxqyApoFBApzn L$_small_initial_partial_block_EpCxqyApoFBApzn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_EpCxqyApoFBApzn 
L$_small_initial_compute_done_EpCxqyApoFBApzn: L$_after_reduction_EpCxqyApoFBApzn: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_2_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_sbkoxvmnmihnaig vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_sbkoxvmnmihnaig L$_16_blocks_overflow_sbkoxvmnmihnaig: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_sbkoxvmnmihnaig: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rlBeEnisjmybagx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 
vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rlBeEnisjmybagx L$_small_initial_partial_block_rlBeEnisjmybagx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rlBeEnisjmybagx: orq %r8,%r8 je L$_after_reduction_rlBeEnisjmybagx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rlBeEnisjmybagx: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_3_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_zopCCjajxtsjEdG vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_zopCCjajxtsjEdG L$_16_blocks_overflow_zopCCjajxtsjEdG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_zopCCjajxtsjEdG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 
vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hzwxdhlzEAlznGG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hzwxdhlzEAlznGG L$_small_initial_partial_block_hzwxdhlzEAlznGG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hzwxdhlzEAlznGG: orq %r8,%r8 je L$_after_reduction_hzwxdhlzEAlznGG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hzwxdhlzEAlznGG: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_4_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_utgfjaowycovqbp vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_utgfjaowycovqbp L$_16_blocks_overflow_utgfjaowycovqbp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_utgfjaowycovqbp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 
vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AslmndcqqeqAFer subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AslmndcqqeqAFer L$_small_initial_partial_block_AslmndcqqeqAFer: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 
vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AslmndcqqeqAFer: orq %r8,%r8 je L$_after_reduction_AslmndcqqeqAFer vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AslmndcqqeqAFer: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_5_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_wugoGjfryfqCjFa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_wugoGjfryfqCjFa L$_16_blocks_overflow_wugoGjfryfqCjFa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_wugoGjfryfqCjFa: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 
$1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CFkxkbxvkninECi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CFkxkbxvkninECi L$_small_initial_partial_block_CFkxkbxvkninECi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CFkxkbxvkninECi: orq %r8,%r8 je L$_after_reduction_CFkxkbxvkninECi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CFkxkbxvkninECi: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_6_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_bpCexfjrkbCbhBc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_bpCexfjrkbCbhBc L$_16_blocks_overflow_bpCexfjrkbCbhBc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_bpCexfjrkbCbhBc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 
$0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ojmsEGarpmywurj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ojmsEGarpmywurj L$_small_initial_partial_block_ojmsEGarpmywurj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ojmsEGarpmywurj: orq %r8,%r8 je L$_after_reduction_ojmsEGarpmywurj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ojmsEGarpmywurj: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_7_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_ifByzBizpdBxFnD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_ifByzBizpdBxFnD L$_16_blocks_overflow_ifByzBizpdBxFnD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_ifByzBizpdBxFnD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 
0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yEEshkytCfbpoyC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yEEshkytCfbpoyC L$_small_initial_partial_block_yEEshkytCfbpoyC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 
vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yEEshkytCfbpoyC: orq %r8,%r8 je L$_after_reduction_yEEshkytCfbpoyC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yEEshkytCfbpoyC: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_8_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_cjwhqEvpCfjCcEa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_cjwhqEvpCfjCcEa L$_16_blocks_overflow_cjwhqEvpCfjCcEa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_cjwhqEvpCfjCcEa: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 
98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EqvthrGbiBgAmsm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EqvthrGbiBgAmsm L$_small_initial_partial_block_EqvthrGbiBgAmsm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EqvthrGbiBgAmsm: orq %r8,%r8 je L$_after_reduction_EqvthrGbiBgAmsm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EqvthrGbiBgAmsm: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_9_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_xiomBjDmsdhvtig vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_xiomBjDmsdhvtig L$_16_blocks_overflow_xiomBjDmsdhvtig: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd 
%zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_xiomBjDmsdhvtig: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_mbfjpvagktvcgbq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mbfjpvagktvcgbq L$_small_initial_partial_block_mbfjpvagktvcgbq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mbfjpvagktvcgbq: orq %r8,%r8 je L$_after_reduction_mbfjpvagktvcgbq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mbfjpvagktvcgbq: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_10_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_cEyikykuFcExlBe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_cEyikykuFcExlBe L$_16_blocks_overflow_cEyikykuFcExlBe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_cEyikykuFcExlBe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zkabbaDExfgmaqw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq 
%zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zkabbaDExfgmaqw L$_small_initial_partial_block_zkabbaDExfgmaqw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zkabbaDExfgmaqw: orq %r8,%r8 je L$_after_reduction_zkabbaDExfgmaqw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zkabbaDExfgmaqw: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_11_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_gsBoGfzrmwqlomo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_gsBoGfzrmwqlomo L$_16_blocks_overflow_gsBoGfzrmwqlomo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_gsBoGfzrmwqlomo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jaixjmwppjCmscj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jaixjmwppjCmscj L$_small_initial_partial_block_jaixjmwppjCmscj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jaixjmwppjCmscj: orq %r8,%r8 je L$_after_reduction_jaixjmwppjCmscj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jaixjmwppjCmscj: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_12_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_CAvgqgqjrtonFws vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_CAvgqgqjrtonFws L$_16_blocks_overflow_CAvgqgqjrtonFws: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_CAvgqgqjrtonFws: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hvmFFygfifAjAnG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq 
$0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hvmFFygfifAjAnG L$_small_initial_partial_block_hvmFFygfifAjAnG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hvmFFygfifAjAnG: orq %r8,%r8 je L$_after_reduction_hvmFFygfifAjAnG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hvmFFygfifAjAnG: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_13_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_zqBffksAbxFoiFr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_zqBffksAbxFoiFr L$_16_blocks_overflow_zqBffksAbxFoiFr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_zqBffksAbxFoiFr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kmvbbtEzBEoeAuq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kmvbbtEzBEoeAuq L$_small_initial_partial_block_kmvbbtEzBEoeAuq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kmvbbtEzBEoeAuq: orq %r8,%r8 je L$_after_reduction_kmvbbtEzBEoeAuq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kmvbbtEzBEoeAuq: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_14_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_mBiifnhuGFDpfDy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_mBiifnhuGFDpfDy L$_16_blocks_overflow_mBiifnhuGFDpfDy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_mBiifnhuGFDpfDy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 
vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb 
%zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_imGnxqypsDyhyek subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_imGnxqypsDyhyek L$_small_initial_partial_block_imGnxqypsDyhyek: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_imGnxqypsDyhyek: orq %r8,%r8 je L$_after_reduction_imGnxqypsDyhyek vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_imGnxqypsDyhyek: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_15_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq 
(%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_zDGlqyFvuaglkeB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_zDGlqyFvuaglkeB L$_16_blocks_overflow_zDGlqyFvuaglkeB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_zDGlqyFvuaglkeB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 
98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BuGprjrzjxrmorl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BuGprjrzjxrmorl L$_small_initial_partial_block_BuGprjrzjxrmorl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BuGprjrzjxrmorl: orq %r8,%r8 je L$_after_reduction_BuGprjrzjxrmorl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BuGprjrzjxrmorl: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_16_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_uwtqqfwgewBdjhg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_uwtqqfwgewBdjhg L$_16_blocks_overflow_uwtqqfwgewBdjhg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_uwtqqfwgewBdjhg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_ifytbdtuElzEqkG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ifytbdtuElzEqkG: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ifytbdtuElzEqkG: jmp L$_last_blocks_done_aibBfiDGEtrGszv L$_last_num_blocks_is_0_aibBfiDGEtrGszv: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 
98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_aibBfiDGEtrGszv: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_pzwgkGgbplFqzaB L$_message_below_32_blocks_pzwgkGgbplFqzaB: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_mgjxphyGhnqeEta vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq 
%zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_mgjxphyGhnqeEta: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_FjCtcrwcdAhCtrr cmpl $8,%r10d je L$_last_num_blocks_is_8_FjCtcrwcdAhCtrr jb L$_last_num_blocks_is_7_1_FjCtcrwcdAhCtrr cmpl $12,%r10d je L$_last_num_blocks_is_12_FjCtcrwcdAhCtrr jb L$_last_num_blocks_is_11_9_FjCtcrwcdAhCtrr cmpl $15,%r10d je L$_last_num_blocks_is_15_FjCtcrwcdAhCtrr ja L$_last_num_blocks_is_16_FjCtcrwcdAhCtrr cmpl $14,%r10d je L$_last_num_blocks_is_14_FjCtcrwcdAhCtrr jmp L$_last_num_blocks_is_13_FjCtcrwcdAhCtrr L$_last_num_blocks_is_11_9_FjCtcrwcdAhCtrr: cmpl $10,%r10d je L$_last_num_blocks_is_10_FjCtcrwcdAhCtrr ja L$_last_num_blocks_is_11_FjCtcrwcdAhCtrr jmp L$_last_num_blocks_is_9_FjCtcrwcdAhCtrr L$_last_num_blocks_is_7_1_FjCtcrwcdAhCtrr: cmpl $4,%r10d je L$_last_num_blocks_is_4_FjCtcrwcdAhCtrr jb L$_last_num_blocks_is_3_1_FjCtcrwcdAhCtrr cmpl $6,%r10d ja L$_last_num_blocks_is_7_FjCtcrwcdAhCtrr je L$_last_num_blocks_is_6_FjCtcrwcdAhCtrr jmp L$_last_num_blocks_is_5_FjCtcrwcdAhCtrr L$_last_num_blocks_is_3_1_FjCtcrwcdAhCtrr: cmpl $2,%r10d ja L$_last_num_blocks_is_3_FjCtcrwcdAhCtrr je L$_last_num_blocks_is_2_FjCtcrwcdAhCtrr L$_last_num_blocks_is_1_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_zCjdttbyboeGxFb vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_zCjdttbyboeGxFb L$_16_blocks_overflow_zCjdttbyboeGxFb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_zCjdttbyboeGxFb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq 
%xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_ojiwxsAElGDCCBo subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ojiwxsAElGDCCBo L$_small_initial_partial_block_ojiwxsAElGDCCBo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_ojiwxsAElGDCCBo L$_small_initial_compute_done_ojiwxsAElGDCCBo: L$_after_reduction_ojiwxsAElGDCCBo: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_2_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_fhFvhqpaozkgyzE vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_fhFvhqpaozkgyzE L$_16_blocks_overflow_fhFvhqpaozkgyzE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_fhFvhqpaozkgyzE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 
96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_poknuzddusxymkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_poknuzddusxymkw L$_small_initial_partial_block_poknuzddusxymkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_poknuzddusxymkw: orq %r8,%r8 je L$_after_reduction_poknuzddusxymkw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_poknuzddusxymkw: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_3_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_hjBmpccGhruhCnv vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_hjBmpccGhruhCnv L$_16_blocks_overflow_hjBmpccGhruhCnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_hjBmpccGhruhCnv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq 
%zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yshcwAFsbqgougy subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yshcwAFsbqgougy L$_small_initial_partial_block_yshcwAFsbqgougy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yshcwAFsbqgougy: orq %r8,%r8 je L$_after_reduction_yshcwAFsbqgougy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yshcwAFsbqgougy: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_4_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_bBrsEuBDcsAcscn vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_bBrsEuBDcsAcscn L$_16_blocks_overflow_bBrsEuBDcsAcscn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_bBrsEuBDcsAcscn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bDghuGEnDqEshwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
L$_small_initial_compute_done_bDghuGEnDqEshwp L$_small_initial_partial_block_bDghuGEnDqEshwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bDghuGEnDqEshwp: orq %r8,%r8 je L$_after_reduction_bDghuGEnDqEshwp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bDghuGEnDqEshwp: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_5_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_nygdGeFptfwzvpw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_nygdGeFptfwzvpw L$_16_blocks_overflow_nygdGeFptfwzvpw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_nygdGeFptfwzvpw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 
vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dFGmpkoEnwhmCiq subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dFGmpkoEnwhmCiq L$_small_initial_partial_block_dFGmpkoEnwhmCiq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dFGmpkoEnwhmCiq: orq %r8,%r8 je L$_after_reduction_dFGmpkoEnwhmCiq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dFGmpkoEnwhmCiq: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_6_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_adtbeheumiAkmlw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_adtbeheumiAkmlw L$_16_blocks_overflow_adtbeheumiAkmlw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_adtbeheumiAkmlw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nDqCwyzuFDuivbj subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nDqCwyzuFDuivbj L$_small_initial_partial_block_nDqCwyzuFDuivbj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nDqCwyzuFDuivbj: orq %r8,%r8 je L$_after_reduction_nDqCwyzuFDuivbj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nDqCwyzuFDuivbj: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_7_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_aDdoAskralEtovy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_aDdoAskralEtovy L$_16_blocks_overflow_aDdoAskralEtovy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_aDdoAskralEtovy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GmgCerxizidGGeG subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GmgCerxizidGGeG L$_small_initial_partial_block_GmgCerxizidGGeG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GmgCerxizidGGeG: orq %r8,%r8 je L$_after_reduction_GmgCerxizidGGeG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GmgCerxizidGGeG: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_8_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_hjBdmnrbjjzAbzC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_hjBdmnrbjjzAbzC L$_16_blocks_overflow_hjBdmnrbjjzAbzC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_hjBdmnrbjjzAbzC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 
768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_oukGaFAnaceFaaB subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_oukGaFAnaceFaaB 
L$_small_initial_partial_block_oukGaFAnaceFaaB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_oukGaFAnaceFaaB: orq %r8,%r8 je L$_after_reduction_oukGaFAnaceFaaB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_oukGaFAnaceFaaB: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_9_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_szBmuqzxwjxBawF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_szBmuqzxwjxBawF L$_16_blocks_overflow_szBmuqzxwjxBawF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_szBmuqzxwjxBawF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_paBklhesgEuGBAF subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_paBklhesgEuGBAF L$_small_initial_partial_block_paBklhesgEuGBAF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_paBklhesgEuGBAF: orq %r8,%r8 je L$_after_reduction_paBklhesgEuGBAF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_paBklhesgEuGBAF: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_10_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_xhlcvtlyGczsicp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_xhlcvtlyGczsicp L$_16_blocks_overflow_xhlcvtlyGczsicp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_xhlcvtlyGczsicp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nytpeiDsozzjuGs subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nytpeiDsozzjuGs L$_small_initial_partial_block_nytpeiDsozzjuGs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nytpeiDsozzjuGs: orq %r8,%r8 je L$_after_reduction_nytpeiDsozzjuGs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nytpeiDsozzjuGs: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_11_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_CkhBiupnDlzBoGx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_CkhBiupnDlzBoGx 
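# The wrap path below appears to handle the case where adding the remaining
# block count would carry out of the low counter byte: the counters are
# byte-reversed with the shuffle mask held in %zmm29, advanced with the
# ddq_add_1234/ddq_add_4444 constants, and reversed back. In the code that
# follows, the raw .byte sequences are EVEX-encoded instructions (0x62
# prefix): opcodes 0xdc/0xdd are VAESENC/VAESENCLAST on the counter blocks,
# and opcode 0x44 with an immediate of 0x00/0x01/0x10/0x11 is VPCLMULQDQ
# forming the GHASH partial products.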
L$_16_blocks_overflow_CkhBiupnDlzBoGx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_CkhBiupnDlzBoGx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BsdepnxnqoCzhkf subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 
vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BsdepnxnqoCzhkf L$_small_initial_partial_block_BsdepnxnqoCzhkf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BsdepnxnqoCzhkf: orq %r8,%r8 je L$_after_reduction_BsdepnxnqoCzhkf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BsdepnxnqoCzhkf: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_12_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_svvcxnisrDiilsD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_svvcxnisrDiilsD L$_16_blocks_overflow_svvcxnisrDiilsD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_svvcxnisrDiilsD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vFAcldEivdmCjng subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vFAcldEivdmCjng L$_small_initial_partial_block_vFAcldEivdmCjng: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vFAcldEivdmCjng: orq %r8,%r8 je L$_after_reduction_vFAcldEivdmCjng vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vFAcldEivdmCjng: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_13_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_oDDmorFzihnoffg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_oDDmorFzihnoffg L$_16_blocks_overflow_oDDmorFzihnoffg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_oDDmorFzihnoffg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yccbCzjnDwADEak subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 
vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yccbCzjnDwADEak L$_small_initial_partial_block_yccbCzjnDwADEak: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yccbCzjnDwADEak: orq %r8,%r8 je L$_after_reduction_yccbCzjnDwADEak vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yccbCzjnDwADEak: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_14_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_liipuseeafvnkfi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_liipuseeafvnkfi L$_16_blocks_overflow_liipuseeafvnkfi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_liipuseeafvnkfi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BentjlpjfFDzvxb subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 
.byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BentjlpjfFDzvxb L$_small_initial_partial_block_BentjlpjfFDzvxb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BentjlpjfFDzvxb: orq %r8,%r8 je L$_after_reduction_BentjlpjfFDzvxb vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BentjlpjfFDzvxb: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_15_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_cuygxmuthGeaeby vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_cuygxmuthGeaeby L$_16_blocks_overflow_cuygxmuthGeaeby: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_cuygxmuthGeaeby: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qokhdigphzzzcxp subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qokhdigphzzzcxp L$_small_initial_partial_block_qokhdigphzzzcxp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qokhdigphzzzcxp: orq %r8,%r8 je L$_after_reduction_qokhdigphzzzcxp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qokhdigphzzzcxp: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_16_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_AgkAgztElEpGqer vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_AgkAgztElEpGqer L$_16_blocks_overflow_AgkAgztElEpGqer: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_AgkAgztElEpGqer: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 
vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_eruDeitqttsEEhG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_eruDeitqttsEEhG: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_eruDeitqttsEEhG: jmp L$_last_blocks_done_FjCtcrwcdAhCtrr L$_last_num_blocks_is_0_FjCtcrwcdAhCtrr: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_FjCtcrwcdAhCtrr: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_pzwgkGgbplFqzaB L$_message_below_equal_16_blocks_pzwgkGgbplFqzaB: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_Arjlgemsqpaxhfj jl L$_small_initial_num_blocks_is_7_1_Arjlgemsqpaxhfj cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_Arjlgemsqpaxhfj jl L$_small_initial_num_blocks_is_11_9_Arjlgemsqpaxhfj cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_Arjlgemsqpaxhfj cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_Arjlgemsqpaxhfj cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_Arjlgemsqpaxhfj jmp L$_small_initial_num_blocks_is_13_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_11_9_Arjlgemsqpaxhfj: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_Arjlgemsqpaxhfj cmpq $10,%r12 je L$_small_initial_num_blocks_is_10_Arjlgemsqpaxhfj jmp L$_small_initial_num_blocks_is_9_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_7_1_Arjlgemsqpaxhfj: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_Arjlgemsqpaxhfj jl L$_small_initial_num_blocks_is_3_1_Arjlgemsqpaxhfj cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_Arjlgemsqpaxhfj cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_Arjlgemsqpaxhfj jmp L$_small_initial_num_blocks_is_5_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_3_1_Arjlgemsqpaxhfj: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_Arjlgemsqpaxhfj cmpq 
$2,%r12 je L$_small_initial_num_blocks_is_2_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_1_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_qFFkbngiCspnnzb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qFFkbngiCspnnzb L$_small_initial_partial_block_qFFkbngiCspnnzb: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_qFFkbngiCspnnzb L$_small_initial_compute_done_qFFkbngiCspnnzb: L$_after_reduction_qFFkbngiCspnnzb: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_2_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 
vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vcznqnCBEluErfz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vcznqnCBEluErfz L$_small_initial_partial_block_vcznqnCBEluErfz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vcznqnCBEluErfz: orq %r8,%r8 je L$_after_reduction_vcznqnCBEluErfz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_vcznqnCBEluErfz: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_3_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lxlwCnecElggboh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 
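# Below, the GHASH partial products computed just above are folded together
# and reduced modulo the field polynomial; POLY2(%rip) supplies the
# reduction constant for the two VPCLMULQDQ steps (again emitted as .byte
# runs), and the reduced value is collected in %xmm14.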
vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lxlwCnecElggboh L$_small_initial_partial_block_lxlwCnecElggboh: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lxlwCnecElggboh: orq %r8,%r8 je L$_after_reduction_lxlwCnecElggboh vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_lxlwCnecElggboh: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_4_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_uwbAugwxtaEtqkm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_uwbAugwxtaEtqkm L$_small_initial_partial_block_uwbAugwxtaEtqkm: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uwbAugwxtaEtqkm: orq %r8,%r8 je L$_after_reduction_uwbAugwxtaEtqkm vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_uwbAugwxtaEtqkm: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_5_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DlAbwtibuwDuckF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq 
%zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DlAbwtibuwDuckF L$_small_initial_partial_block_DlAbwtibuwDuckF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DlAbwtibuwDuckF: orq %r8,%r8 je L$_after_reduction_DlAbwtibuwDuckF vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_DlAbwtibuwDuckF: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_6_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bGfevqujtGrmyqw subq $16,%r8 movl 
$0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bGfevqujtGrmyqw L$_small_initial_partial_block_bGfevqujtGrmyqw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bGfevqujtGrmyqw: orq %r8,%r8 je L$_after_reduction_bGfevqujtGrmyqw vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_bGfevqujtGrmyqw: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_7_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BDbECEkpAEccDln subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BDbECEkpAEccDln L$_small_initial_partial_block_BDbECEkpAEccDln: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BDbECEkpAEccDln: orq %r8,%r8 je L$_after_reduction_BDbECEkpAEccDln vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_BDbECEkpAEccDln: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_8_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 
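/* Added commentary (not emitted by the generator): the ".byte 98,..." sequences
   around this point appear to be hand-encoded EVEX instructions (98 = 0x62, the
   EVEX prefix); the ones ending in opcode 220/221 (0xDC/0xDD) look like
   vaesenc/vaesenclast on zmm registers, emitted as raw bytes so the file still
   assembles with toolchains whose assemblers lack VAES mnemonic support. */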
vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ngynpdbFzwtiwpp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ngynpdbFzwtiwpp L$_small_initial_partial_block_ngynpdbFzwtiwpp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ngynpdbFzwtiwpp: orq %r8,%r8 je 
L$_after_reduction_ngynpdbFzwtiwpp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ngynpdbFzwtiwpp: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_9_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bxCwlFCulijpvoi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bxCwlFCulijpvoi L$_small_initial_partial_block_bxCwlFCulijpvoi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bxCwlFCulijpvoi: orq %r8,%r8 je L$_after_reduction_bxCwlFCulijpvoi vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_bxCwlFCulijpvoi: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_10_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb 
%zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kAlkoicirsyCsoA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kAlkoicirsyCsoA L$_small_initial_partial_block_kAlkoicirsyCsoA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kAlkoicirsyCsoA: orq %r8,%r8 je L$_after_reduction_kAlkoicirsyCsoA vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_kAlkoicirsyCsoA: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_11_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 
0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rsxtmscApkaFsGk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rsxtmscApkaFsGk L$_small_initial_partial_block_rsxtmscApkaFsGk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 
98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rsxtmscApkaFsGk: orq %r8,%r8 je L$_after_reduction_rsxtmscApkaFsGk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_rsxtmscApkaFsGk: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_12_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_uytuqlquheEjDpf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 
64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_uytuqlquheEjDpf L$_small_initial_partial_block_uytuqlquheEjDpf: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uytuqlquheEjDpf: orq %r8,%r8 je L$_after_reduction_uytuqlquheEjDpf vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_uytuqlquheEjDpf: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_13_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq 
%xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AEjkiAmqCDcyaGF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AEjkiAmqCDcyaGF L$_small_initial_partial_block_AEjkiAmqCDcyaGF: movl %r8d,(%rdx) 
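/* Added commentary (not emitted by the generator): this branch handles the case
   where the tail of the message does not fill a whole 16-byte block. It appears
   to record the leftover byte count at (%rdx) and stash what looks like the last
   (partial) ciphertext block at 16(%rsi) for later GHASH completion, then folds
   the processed blocks into the hash accumulator; the POLY2 constant referenced
   below is the helper used for the carry-less-multiply (GF(2^128)) reduction. */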
vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AEjkiAmqCDcyaGF: orq %r8,%r8 je L$_after_reduction_AEjkiAmqCDcyaGF vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_AEjkiAmqCDcyaGF: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_14_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 
98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xweDbtGBgzaynjE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xweDbtGBgzaynjE L$_small_initial_partial_block_xweDbtGBgzaynjE: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xweDbtGBgzaynjE: orq %r8,%r8 je L$_after_reduction_xweDbtGBgzaynjE vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_xweDbtGBgzaynjE: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_15_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ffBpbkEzFalCAqm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 
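/* Added commentary (not emitted by the generator): the ".byte 98,...,68,..."
   sequences here appear to be EVEX-encoded vpclmulqdq instructions (opcode 0x44)
   performing the GHASH carry-less multiplications; the vextracti/vpxorq ladder
   that follows folds the 512-bit partial products down to 128 bits before the
   POLY2-based reduction into %xmm14. */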
.byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ffBpbkEzFalCAqm L$_small_initial_partial_block_ffBpbkEzFalCAqm: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ffBpbkEzFalCAqm: orq %r8,%r8 je L$_after_reduction_ffBpbkEzFalCAqm vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ffBpbkEzFalCAqm: jmp L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj L$_small_initial_num_blocks_is_16_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq 
%zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_icDnnqvChyBsuli: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_icDnnqvChyBsuli: vpxorq 
%xmm13,%xmm14,%xmm14 L$_after_reduction_icDnnqvChyBsuli: L$_small_initial_blocks_encrypted_Arjlgemsqpaxhfj: L$_ghash_done_pzwgkGgbplFqzaB: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_pzwgkGgbplFqzaB: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_pzwgkGgbplFqzaB: jmp L$exit_gcm_encrypt .p2align 5 L$aes_gcm_encrypt_192_avx512: orq %r8,%r8 je L$_enc_dec_abort_jzxBnczDBxGvzop xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_CoBypAsApBwqcnx movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_CoBypAsApBwqcnx subq %r13,%r12 L$_no_extra_mask_CoBypAsApBwqcnx: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_CoBypAsApBwqcnx .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_CoBypAsApBwqcnx L$_partial_incomplete_CoBypAsApBwqcnx: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_CoBypAsApBwqcnx: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_CoBypAsApBwqcnx: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_jzxBnczDBxGvzop cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_jzxBnczDBxGvzop vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_lelEEvckqsGkuGn vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_lelEEvckqsGkuGn L$_next_16_overflow_lelEEvckqsGkuGn: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_lelEEvckqsGkuGn: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 
98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_EclAduckuFhozAl vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_EclAduckuFhozAl: cmpq $512,%r8 jb L$_message_below_32_blocks_jzxBnczDBxGvzop cmpb $240,%r15b jae L$_next_16_overflow_hzduBGFfzuzeflu vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_hzduBGFfzuzeflu L$_next_16_overflow_hzduBGFfzuzeflu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_hzduBGFfzuzeflu: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 
98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_vFCorhCAmhdDCzm vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 
98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_vFCorhCAmhdDCzm: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_jzxBnczDBxGvzop L$_encrypt_big_nblocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae L$_16_blocks_overflow_tbpqxctvntvnomu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_tbpqxctvntvnomu L$_16_blocks_overflow_tbpqxctvntvnomu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_tbpqxctvntvnomu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq 
%zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_oaDubdDhvdaaGvl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_oaDubdDhvdaaGvl L$_16_blocks_overflow_oaDubdDhvdaaGvl: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 
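# Note: the ".byte 98,..." sequences in this region appear to be EVEX-encoded
# instructions emitted as raw bytes (0x62 is the EVEX prefix; opcodes 0xDC/0xDD
# correspond to vaesenc/vaesenclast and 0x44 to vpclmulqdq on zmm registers),
# presumably so the file still assembles with toolchains lacking VAES/VPCLMULQDQ mnemonics.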
vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_oaDubdDhvdaaGvl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 
%zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_BEBEkieDehCjfpg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_BEBEkieDehCjfpg L$_16_blocks_overflow_BEBEkieDehCjfpg: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_BEBEkieDehCjfpg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
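# In this stretch the accumulated GHASH partial products are folded (vpsrldq/vpslldq
# plus three-way XOR via vpternlogq $0x96) and reduced with the POLY2 constant using
# carry-less multiplies, interleaved with the remaining AES rounds of the 16-block
# batch, which looks like the usual latency-hiding arrangement for this kernel.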
vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_jzxBnczDBxGvzop L$_no_more_big_nblocks_jzxBnczDBxGvzop: cmpq $512,%r8 jae L$_encrypt_32_blocks_jzxBnczDBxGvzop cmpq $256,%r8 jae L$_encrypt_16_blocks_jzxBnczDBxGvzop L$_encrypt_0_blocks_ghash_32_jzxBnczDBxGvzop: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_BsvdFlCzDbBougk cmpl $8,%r10d je L$_last_num_blocks_is_8_BsvdFlCzDbBougk jb L$_last_num_blocks_is_7_1_BsvdFlCzDbBougk cmpl $12,%r10d je L$_last_num_blocks_is_12_BsvdFlCzDbBougk jb L$_last_num_blocks_is_11_9_BsvdFlCzDbBougk cmpl $15,%r10d je L$_last_num_blocks_is_15_BsvdFlCzDbBougk ja L$_last_num_blocks_is_16_BsvdFlCzDbBougk cmpl $14,%r10d je L$_last_num_blocks_is_14_BsvdFlCzDbBougk jmp L$_last_num_blocks_is_13_BsvdFlCzDbBougk L$_last_num_blocks_is_11_9_BsvdFlCzDbBougk: cmpl $10,%r10d je L$_last_num_blocks_is_10_BsvdFlCzDbBougk ja L$_last_num_blocks_is_11_BsvdFlCzDbBougk jmp L$_last_num_blocks_is_9_BsvdFlCzDbBougk L$_last_num_blocks_is_7_1_BsvdFlCzDbBougk: cmpl $4,%r10d je L$_last_num_blocks_is_4_BsvdFlCzDbBougk jb L$_last_num_blocks_is_3_1_BsvdFlCzDbBougk cmpl $6,%r10d ja L$_last_num_blocks_is_7_BsvdFlCzDbBougk je L$_last_num_blocks_is_6_BsvdFlCzDbBougk jmp L$_last_num_blocks_is_5_BsvdFlCzDbBougk L$_last_num_blocks_is_3_1_BsvdFlCzDbBougk: cmpl $2,%r10d ja L$_last_num_blocks_is_3_BsvdFlCzDbBougk je 
L$_last_num_blocks_is_2_BsvdFlCzDbBougk L$_last_num_blocks_is_1_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_czjqmrcuGbkhjtu vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_czjqmrcuGbkhjtu L$_16_blocks_overflow_czjqmrcuGbkhjtu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_czjqmrcuGbkhjtu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_kFnqfsluDrycrwr subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kFnqfsluDrycrwr L$_small_initial_partial_block_kFnqfsluDrycrwr: movl %r8d,(%rdx) vmovdqu64 
%xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_kFnqfsluDrycrwr L$_small_initial_compute_done_kFnqfsluDrycrwr: L$_after_reduction_kFnqfsluDrycrwr: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_2_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_tCDuaqxntEtBCqr vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_tCDuaqxntEtBCqr L$_16_blocks_overflow_tCDuaqxntEtBCqr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_tCDuaqxntEtBCqr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nrarlmFvApvbzxy subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq 
%zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nrarlmFvApvbzxy L$_small_initial_partial_block_nrarlmFvApvbzxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nrarlmFvApvbzxy: orq %r8,%r8 je L$_after_reduction_nrarlmFvApvbzxy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nrarlmFvApvbzxy: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_3_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_AxfvkflbDBEFEmp vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_AxfvkflbDBEFEmp L$_16_blocks_overflow_AxfvkflbDBEFEmp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_AxfvkflbDBEFEmp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wasBAmmrjbGbemo subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wasBAmmrjbGbemo L$_small_initial_partial_block_wasBAmmrjbGbemo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wasBAmmrjbGbemo: orq %r8,%r8 je L$_after_reduction_wasBAmmrjbGbemo vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wasBAmmrjbGbemo: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_4_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_xkpgotEfuidCEnC vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_xkpgotEfuidCEnC L$_16_blocks_overflow_xkpgotEfuidCEnC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_xkpgotEfuidCEnC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq 
%zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DvcssyjwzrqmFlE subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DvcssyjwzrqmFlE L$_small_initial_partial_block_DvcssyjwzrqmFlE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DvcssyjwzrqmFlE: orq %r8,%r8 je L$_after_reduction_DvcssyjwzrqmFlE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DvcssyjwzrqmFlE: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_5_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_pxAyyxhuewraobh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_pxAyyxhuewraobh L$_16_blocks_overflow_pxAyyxhuewraobh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_pxAyyxhuewraobh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_Eawjwfemrjotopq subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Eawjwfemrjotopq L$_small_initial_partial_block_Eawjwfemrjotopq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Eawjwfemrjotopq: orq %r8,%r8 je L$_after_reduction_Eawjwfemrjotopq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Eawjwfemrjotopq: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_6_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_rlBkdasaFkzjByu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_rlBkdasaFkzjByu L$_16_blocks_overflow_rlBkdasaFkzjByu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_rlBkdasaFkzjByu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 
98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AesejBFGrhphEgi subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AesejBFGrhphEgi L$_small_initial_partial_block_AesejBFGrhphEgi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AesejBFGrhphEgi: orq %r8,%r8 je L$_after_reduction_AesejBFGrhphEgi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AesejBFGrhphEgi: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_7_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_keqkskoubnuElfA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_keqkskoubnuElfA L$_16_blocks_overflow_keqkskoubnuElfA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_keqkskoubnuElfA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BxDABaeeqkhilCj subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BxDABaeeqkhilCj L$_small_initial_partial_block_BxDABaeeqkhilCj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BxDABaeeqkhilCj: orq %r8,%r8 je L$_after_reduction_BxDABaeeqkhilCj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BxDABaeeqkhilCj: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_8_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_hwCFDDlqwBqrdyx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_hwCFDDlqwBqrdyx L$_16_blocks_overflow_hwCFDDlqwBqrdyx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_hwCFDDlqwBqrdyx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BeuuFnmEliqBmCs subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BeuuFnmEliqBmCs L$_small_initial_partial_block_BeuuFnmEliqBmCs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BeuuFnmEliqBmCs: orq %r8,%r8 je L$_after_reduction_BeuuFnmEliqBmCs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BeuuFnmEliqBmCs: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_9_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_ybEEnfpGmbdDyaC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_ybEEnfpGmbdDyaC L$_16_blocks_overflow_ybEEnfpGmbdDyaC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_ybEEnfpGmbdDyaC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} 
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bDrrnAatcuCrjCa subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bDrrnAatcuCrjCa L$_small_initial_partial_block_bDrrnAatcuCrjCa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq 
$8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bDrrnAatcuCrjCa: orq %r8,%r8 je L$_after_reduction_bDrrnAatcuCrjCa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bDrrnAatcuCrjCa: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_10_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_opfbCaznAiAepnv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_opfbCaznAiAepnv L$_16_blocks_overflow_opfbCaznAiAepnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_opfbCaznAiAepnv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CsmvpucAbBEBcvl subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CsmvpucAbBEBcvl L$_small_initial_partial_block_CsmvpucAbBEBcvl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 
vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CsmvpucAbBEBcvl: orq %r8,%r8 je L$_after_reduction_CsmvpucAbBEBcvl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CsmvpucAbBEBcvl: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_11_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_qxFolltldGnscDg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_qxFolltldGnscDg L$_16_blocks_overflow_qxFolltldGnscDg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_qxFolltldGnscDg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 
vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AaGweewAhEribny subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AaGweewAhEribny L$_small_initial_partial_block_AaGweewAhEribny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AaGweewAhEribny: orq %r8,%r8 je L$_after_reduction_AaGweewAhEribny vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AaGweewAhEribny: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_12_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae 
L$_16_blocks_overflow_nvmdGffBdmtukpe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_nvmdGffBdmtukpe L$_16_blocks_overflow_nvmdGffBdmtukpe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_nvmdGffBdmtukpe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_FoabkbEhqjtqagB subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FoabkbEhqjtqagB L$_small_initial_partial_block_FoabkbEhqjtqagB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FoabkbEhqjtqagB: orq %r8,%r8 je L$_after_reduction_FoabkbEhqjtqagB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FoabkbEhqjtqagB: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_13_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_zGEqEwwbyegFygC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_zGEqEwwbyegFygC L$_16_blocks_overflow_zGEqEwwbyegFygC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb 
%zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_zGEqEwwbyegFygC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 
vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gjvieAerDfDGsxy subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gjvieAerDfDGsxy L$_small_initial_partial_block_gjvieAerDfDGsxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gjvieAerDfDGsxy: orq %r8,%r8 je L$_after_reduction_gjvieAerDfDGsxy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gjvieAerDfDGsxy: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_14_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_hGfdBnfArvqgnDo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_hGfdBnfArvqgnDo 
L$_16_blocks_overflow_hGfdBnfArvqgnDo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_hGfdBnfArvqgnDo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_faDbEijoauEqsyG subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_faDbEijoauEqsyG L$_small_initial_partial_block_faDbEijoauEqsyG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_faDbEijoauEqsyG: orq %r8,%r8 je L$_after_reduction_faDbEijoauEqsyG vpxorq 
%xmm7,%xmm14,%xmm14 L$_after_reduction_faDbEijoauEqsyG: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_15_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_AhbxhfFAjAuyeFk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_AhbxhfFAjAuyeFk L$_16_blocks_overflow_AhbxhfFAjAuyeFk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_AhbxhfFAjAuyeFk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sxxFbklDpjCfEvm subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sxxFbklDpjCfEvm L$_small_initial_partial_block_sxxFbklDpjCfEvm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sxxFbklDpjCfEvm: orq %r8,%r8 je L$_after_reduction_sxxFbklDpjCfEvm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sxxFbklDpjCfEvm: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_16_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_njybzcioxuyaaaD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_njybzcioxuyaaaD L$_16_blocks_overflow_njybzcioxuyaaaD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_njybzcioxuyaaaD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_bCffyflcoaBxCzy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bCffyflcoaBxCzy: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bCffyflcoaBxCzy: jmp L$_last_blocks_done_BsvdFlCzDbBougk L$_last_num_blocks_is_0_BsvdFlCzDbBougk: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 
128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_BsvdFlCzDbBougk: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_jzxBnczDBxGvzop L$_encrypt_32_blocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae L$_16_blocks_overflow_wafuliacDuosCms vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_wafuliacDuosCms L$_16_blocks_overflow_wafuliacDuosCms: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_wafuliacDuosCms: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_kAejpmvyzczzucF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_kAejpmvyzczzucF L$_16_blocks_overflow_kAejpmvyzczzucF: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_kAejpmvyzczzucF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 
.byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_vkvGtsckpeodFyd cmpl $8,%r10d je L$_last_num_blocks_is_8_vkvGtsckpeodFyd jb L$_last_num_blocks_is_7_1_vkvGtsckpeodFyd cmpl $12,%r10d je L$_last_num_blocks_is_12_vkvGtsckpeodFyd jb L$_last_num_blocks_is_11_9_vkvGtsckpeodFyd cmpl $15,%r10d je L$_last_num_blocks_is_15_vkvGtsckpeodFyd ja L$_last_num_blocks_is_16_vkvGtsckpeodFyd cmpl $14,%r10d je L$_last_num_blocks_is_14_vkvGtsckpeodFyd jmp L$_last_num_blocks_is_13_vkvGtsckpeodFyd L$_last_num_blocks_is_11_9_vkvGtsckpeodFyd: cmpl $10,%r10d je L$_last_num_blocks_is_10_vkvGtsckpeodFyd ja L$_last_num_blocks_is_11_vkvGtsckpeodFyd jmp L$_last_num_blocks_is_9_vkvGtsckpeodFyd L$_last_num_blocks_is_7_1_vkvGtsckpeodFyd: cmpl $4,%r10d je L$_last_num_blocks_is_4_vkvGtsckpeodFyd jb L$_last_num_blocks_is_3_1_vkvGtsckpeodFyd cmpl $6,%r10d ja L$_last_num_blocks_is_7_vkvGtsckpeodFyd je L$_last_num_blocks_is_6_vkvGtsckpeodFyd jmp L$_last_num_blocks_is_5_vkvGtsckpeodFyd L$_last_num_blocks_is_3_1_vkvGtsckpeodFyd: cmpl $2,%r10d ja L$_last_num_blocks_is_3_vkvGtsckpeodFyd je L$_last_num_blocks_is_2_vkvGtsckpeodFyd L$_last_num_blocks_is_1_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_BvAqyjatyidEnnt vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_BvAqyjatyidEnnt L$_16_blocks_overflow_BvAqyjatyidEnnt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_BvAqyjatyidEnnt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 
vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_gBeshkmzGvkmrAi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gBeshkmzGvkmrAi L$_small_initial_partial_block_gBeshkmzGvkmrAi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_gBeshkmzGvkmrAi L$_small_initial_compute_done_gBeshkmzGvkmrAi: L$_after_reduction_gBeshkmzGvkmrAi: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_2_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_nbawutokAutAqum vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_nbawutokAutAqum L$_16_blocks_overflow_nbawutokAutAqum: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_nbawutokAutAqum: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 
98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BlGfnlBkldmmFcw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BlGfnlBkldmmFcw L$_small_initial_partial_block_BlGfnlBkldmmFcw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BlGfnlBkldmmFcw: orq %r8,%r8 je L$_after_reduction_BlGfnlBkldmmFcw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BlGfnlBkldmmFcw: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_3_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_CwkxGelBrtqaaxv vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_CwkxGelBrtqaaxv L$_16_blocks_overflow_CwkxGelBrtqaaxv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_CwkxGelBrtqaaxv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EEhEwlabesmvDev subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EEhEwlabesmvDev L$_small_initial_partial_block_EEhEwlabesmvDev: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EEhEwlabesmvDev: orq %r8,%r8 je L$_after_reduction_EEhEwlabesmvDev vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EEhEwlabesmvDev: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_4_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_gFpynBlybCeGalG vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_gFpynBlybCeGalG L$_16_blocks_overflow_gFpynBlybCeGalG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_gFpynBlybCeGalG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gFxpzjaswtGGooa subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gFxpzjaswtGGooa L$_small_initial_partial_block_gFxpzjaswtGGooa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gFxpzjaswtGGooa: orq %r8,%r8 je L$_after_reduction_gFxpzjaswtGGooa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gFxpzjaswtGGooa: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_5_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_xwErcCwicbEwFqC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_xwErcCwicbEwFqC L$_16_blocks_overflow_xwErcCwicbEwFqC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_xwErcCwicbEwFqC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GaEkADDkkdyyuqC subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GaEkADDkkdyyuqC L$_small_initial_partial_block_GaEkADDkkdyyuqC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GaEkADDkkdyyuqC: orq %r8,%r8 je L$_after_reduction_GaEkADDkkdyyuqC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GaEkADDkkdyyuqC: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_6_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_baDecrAptncCCuf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_baDecrAptncCCuf 
L$_16_blocks_overflow_baDecrAptncCCuf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_baDecrAptncCCuf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GuszoBBsEjlucdt subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GuszoBBsEjlucdt L$_small_initial_partial_block_GuszoBBsEjlucdt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GuszoBBsEjlucdt: orq %r8,%r8 je L$_after_reduction_GuszoBBsEjlucdt vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GuszoBBsEjlucdt: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_7_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_iltrljarpeDchus vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_iltrljarpeDchus L$_16_blocks_overflow_iltrljarpeDchus: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_iltrljarpeDchus: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 
vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iGfglGojAckhaEr subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iGfglGojAckhaEr L$_small_initial_partial_block_iGfglGojAckhaEr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iGfglGojAckhaEr: orq %r8,%r8 je L$_after_reduction_iGfglGojAckhaEr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iGfglGojAckhaEr: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_8_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_eyzjCojxduufqEi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_eyzjCojxduufqEi L$_16_blocks_overflow_eyzjCojxduufqEi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_eyzjCojxduufqEi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hczvcmipanjdewG subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 
vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hczvcmipanjdewG L$_small_initial_partial_block_hczvcmipanjdewG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hczvcmipanjdewG: orq %r8,%r8 je L$_after_reduction_hczvcmipanjdewG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hczvcmipanjdewG: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_9_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_bwdCwgCmnErFeDe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_bwdCwgCmnErFeDe L$_16_blocks_overflow_bwdCwgCmnErFeDe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_bwdCwgCmnErFeDe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 
vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nczsEBrGqvtCBoe subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nczsEBrGqvtCBoe L$_small_initial_partial_block_nczsEBrGqvtCBoe: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nczsEBrGqvtCBoe: orq %r8,%r8 je L$_after_reduction_nczsEBrGqvtCBoe vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nczsEBrGqvtCBoe: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_10_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_eGGpBsfFnpwwbub vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_eGGpBsfFnpwwbub L$_16_blocks_overflow_eGGpBsfFnpwwbub: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_eGGpBsfFnpwwbub: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EcwCefEtlqcfEms subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EcwCefEtlqcfEms L$_small_initial_partial_block_EcwCefEtlqcfEms: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq 
%zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EcwCefEtlqcfEms: orq %r8,%r8 je L$_after_reduction_EcwCefEtlqcfEms vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EcwCefEtlqcfEms: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_11_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_eddhoEuAgjbBjFF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_eddhoEuAgjbBjFF L$_16_blocks_overflow_eddhoEuAgjbBjFF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_eddhoEuAgjbBjFF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wytgrCdaysqdDEF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wytgrCdaysqdDEF L$_small_initial_partial_block_wytgrCdaysqdDEF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wytgrCdaysqdDEF: orq %r8,%r8 je L$_after_reduction_wytgrCdaysqdDEF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wytgrCdaysqdDEF: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_12_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_bfsFAnmADrmmioq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_bfsFAnmADrmmioq L$_16_blocks_overflow_bfsFAnmADrmmioq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_bfsFAnmADrmmioq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pzClwApspseFxiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pzClwApspseFxiy L$_small_initial_partial_block_pzClwApspseFxiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
L$_small_initial_compute_done_pzClwApspseFxiy: orq %r8,%r8 je L$_after_reduction_pzClwApspseFxiy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pzClwApspseFxiy: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_13_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_eqddxBoxqiwCsny vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_eqddxBoxqiwCsny L$_16_blocks_overflow_eqddxBoxqiwCsny: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_eqddxBoxqiwCsny: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 
.byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jtCktBigdCvArrs subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jtCktBigdCvArrs L$_small_initial_partial_block_jtCktBigdCvArrs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jtCktBigdCvArrs: orq %r8,%r8 je L$_after_reduction_jtCktBigdCvArrs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jtCktBigdCvArrs: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_14_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_DAGxccpeauyqpCa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_DAGxccpeauyqpCa L$_16_blocks_overflow_DAGxccpeauyqpCa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_DAGxccpeauyqpCa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 
vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_eekywuGEAhgthae subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_eekywuGEAhgthae L$_small_initial_partial_block_eekywuGEAhgthae: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_eekywuGEAhgthae: orq %r8,%r8 je L$_after_reduction_eekywuGEAhgthae vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_eekywuGEAhgthae: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_15_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_xrzdkvEbdpatlsn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_xrzdkvEbdpatlsn L$_16_blocks_overflow_xrzdkvEbdpatlsn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_xrzdkvEbdpatlsn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nvxEscrdCznvhGj subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nvxEscrdCznvhGj L$_small_initial_partial_block_nvxEscrdCznvhGj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq 
$0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nvxEscrdCznvhGj: orq %r8,%r8 je L$_after_reduction_nvxEscrdCznvhGj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nvxEscrdCznvhGj: jmp L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_16_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_nhkzxmwsyGuskoi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_nhkzxmwsyGuskoi L$_16_blocks_overflow_nhkzxmwsyGuskoi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_nhkzxmwsyGuskoi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_ECtspjaqpoxwhnx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ECtspjaqpoxwhnx: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ECtspjaqpoxwhnx: jmp 
L$_last_blocks_done_vkvGtsckpeodFyd L$_last_num_blocks_is_0_vkvGtsckpeodFyd: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_vkvGtsckpeodFyd: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_jzxBnczDBxGvzop L$_encrypt_16_blocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae L$_16_blocks_overflow_kkhtsxadreytpgc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_kkhtsxadreytpgc L$_16_blocks_overflow_kkhtsxadreytpgc: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_kkhtsxadreytpgc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 
vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CGhwyqzCxCrDAod cmpl $8,%r10d je L$_last_num_blocks_is_8_CGhwyqzCxCrDAod jb L$_last_num_blocks_is_7_1_CGhwyqzCxCrDAod cmpl $12,%r10d je L$_last_num_blocks_is_12_CGhwyqzCxCrDAod jb L$_last_num_blocks_is_11_9_CGhwyqzCxCrDAod cmpl $15,%r10d je L$_last_num_blocks_is_15_CGhwyqzCxCrDAod ja 
L$_last_num_blocks_is_16_CGhwyqzCxCrDAod cmpl $14,%r10d je L$_last_num_blocks_is_14_CGhwyqzCxCrDAod jmp L$_last_num_blocks_is_13_CGhwyqzCxCrDAod L$_last_num_blocks_is_11_9_CGhwyqzCxCrDAod: cmpl $10,%r10d je L$_last_num_blocks_is_10_CGhwyqzCxCrDAod ja L$_last_num_blocks_is_11_CGhwyqzCxCrDAod jmp L$_last_num_blocks_is_9_CGhwyqzCxCrDAod L$_last_num_blocks_is_7_1_CGhwyqzCxCrDAod: cmpl $4,%r10d je L$_last_num_blocks_is_4_CGhwyqzCxCrDAod jb L$_last_num_blocks_is_3_1_CGhwyqzCxCrDAod cmpl $6,%r10d ja L$_last_num_blocks_is_7_CGhwyqzCxCrDAod je L$_last_num_blocks_is_6_CGhwyqzCxCrDAod jmp L$_last_num_blocks_is_5_CGhwyqzCxCrDAod L$_last_num_blocks_is_3_1_CGhwyqzCxCrDAod: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CGhwyqzCxCrDAod je L$_last_num_blocks_is_2_CGhwyqzCxCrDAod L$_last_num_blocks_is_1_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_ycAFtgAvrzFpmud vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_ycAFtgAvrzFpmud L$_16_blocks_overflow_ycAFtgAvrzFpmud: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_ycAFtgAvrzFpmud: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq 
$0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_CbqAGqoFBCoBcnn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CbqAGqoFBCoBcnn L$_small_initial_partial_block_CbqAGqoFBCoBcnn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_CbqAGqoFBCoBcnn L$_small_initial_compute_done_CbqAGqoFBCoBcnn: L$_after_reduction_CbqAGqoFBCoBcnn: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_2_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_unaFqvbBnCelmgG vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_unaFqvbBnCelmgG L$_16_blocks_overflow_unaFqvbBnCelmgG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_unaFqvbBnCelmgG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 
98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gzvpemiEleCjEbC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gzvpemiEleCjEbC L$_small_initial_partial_block_gzvpemiEleCjEbC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gzvpemiEleCjEbC: orq %r8,%r8 je L$_after_reduction_gzvpemiEleCjEbC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gzvpemiEleCjEbC: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_3_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_FzufylrxyerzBEy vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_FzufylrxyerzBEy L$_16_blocks_overflow_FzufylrxyerzBEy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_FzufylrxyerzBEy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 
1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gqwjyzltkrfhGvo subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gqwjyzltkrfhGvo L$_small_initial_partial_block_gqwjyzltkrfhGvo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 
98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gqwjyzltkrfhGvo: orq %r8,%r8 je L$_after_reduction_gqwjyzltkrfhGvo vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gqwjyzltkrfhGvo: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_4_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_FtupvahihsnvuAd vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_FtupvahihsnvuAd L$_16_blocks_overflow_FtupvahihsnvuAd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_FtupvahihsnvuAd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wBowoFhurirchGq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wBowoFhurirchGq L$_small_initial_partial_block_wBowoFhurirchGq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wBowoFhurirchGq: orq %r8,%r8 je L$_after_reduction_wBowoFhurirchGq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wBowoFhurirchGq: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_5_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_uBhGhomDazsjBak vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_uBhGhomDazsjBak L$_16_blocks_overflow_uBhGhomDazsjBak: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_uBhGhomDazsjBak: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_eFbGprqpsBhvBkh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_eFbGprqpsBhvBkh L$_small_initial_partial_block_eFbGprqpsBhvBkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_eFbGprqpsBhvBkh: orq %r8,%r8 je L$_after_reduction_eFbGprqpsBhvBkh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_eFbGprqpsBhvBkh: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_6_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_mBfhrGpovoncBkc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_mBfhrGpovoncBkc L$_16_blocks_overflow_mBfhrGpovoncBkc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_mBfhrGpovoncBkc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 
98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qFilbDGEygcyzzw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qFilbDGEygcyzzw L$_small_initial_partial_block_qFilbDGEygcyzzw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 
vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qFilbDGEygcyzzw: orq %r8,%r8 je L$_after_reduction_qFilbDGEygcyzzw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qFilbDGEygcyzzw: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_7_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_FvpewBABrfyByvd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_FvpewBABrfyByvd L$_16_blocks_overflow_FvpewBABrfyByvd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_FvpewBABrfyByvd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 
98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vrmegiBFdzfFmfq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vrmegiBFdzfFmfq L$_small_initial_partial_block_vrmegiBFdzfFmfq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vrmegiBFdzfFmfq: orq %r8,%r8 je L$_after_reduction_vrmegiBFdzfFmfq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vrmegiBFdzfFmfq: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_8_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_FsoptjzAkrqyAAr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_FsoptjzAkrqyAAr L$_16_blocks_overflow_FsoptjzAkrqyAAr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_FsoptjzAkrqyAAr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 
16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vkFiBjCFtrykuwD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 
vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vkFiBjCFtrykuwD L$_small_initial_partial_block_vkFiBjCFtrykuwD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vkFiBjCFtrykuwD: orq %r8,%r8 je L$_after_reduction_vkFiBjCFtrykuwD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vkFiBjCFtrykuwD: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_9_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_iABBxfvotBEkECx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_iABBxfvotBEkECx L$_16_blocks_overflow_iABBxfvotBEkECx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_iABBxfvotBEkECx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ypbbgpxgCctCtxy subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ypbbgpxgCctCtxy L$_small_initial_partial_block_ypbbgpxgCctCtxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ypbbgpxgCctCtxy: orq %r8,%r8 je L$_after_reduction_ypbbgpxgCctCtxy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ypbbgpxgCctCtxy: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_10_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_jEngtqCkuniGdjp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_jEngtqCkuniGdjp L$_16_blocks_overflow_jEngtqCkuniGdjp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_jEngtqCkuniGdjp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 
vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nffhkznowjoDiCf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 
vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nffhkznowjoDiCf L$_small_initial_partial_block_nffhkznowjoDiCf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nffhkznowjoDiCf: orq %r8,%r8 je L$_after_reduction_nffhkznowjoDiCf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nffhkznowjoDiCf: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_11_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_beoirgaAxslixji vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_beoirgaAxslixji L$_16_blocks_overflow_beoirgaAxslixji: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_beoirgaAxslixji: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 
vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cqzlemDcyGkhDnC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 
vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cqzlemDcyGkhDnC L$_small_initial_partial_block_cqzlemDcyGkhDnC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cqzlemDcyGkhDnC: orq %r8,%r8 je L$_after_reduction_cqzlemDcyGkhDnC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cqzlemDcyGkhDnC: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_12_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_sxrCycfBickEpCs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_sxrCycfBickEpCs L$_16_blocks_overflow_sxrCycfBickEpCs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_sxrCycfBickEpCs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 
98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lzBzlrbzBeACuhk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lzBzlrbzBeACuhk L$_small_initial_partial_block_lzBzlrbzBeACuhk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lzBzlrbzBeACuhk: orq %r8,%r8 je L$_after_reduction_lzBzlrbzBeACuhk vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lzBzlrbzBeACuhk: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_13_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_sesGGmqiCkypotq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_sesGGmqiCkypotq L$_16_blocks_overflow_sesGGmqiCkypotq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_sesGGmqiCkypotq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 
1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qihphhEmthsffzk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 
98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qihphhEmthsffzk L$_small_initial_partial_block_qihphhEmthsffzk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qihphhEmthsffzk: orq %r8,%r8 je L$_after_reduction_qihphhEmthsffzk vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qihphhEmthsffzk: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_14_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_jqifyxAoeoxkDuE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_jqifyxAoeoxkDuE L$_16_blocks_overflow_jqifyxAoeoxkDuE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_jqifyxAoeoxkDuE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 
$1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 
vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FkuwuuqBpnEvzkd subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FkuwuuqBpnEvzkd L$_small_initial_partial_block_FkuwuuqBpnEvzkd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
L$_small_initial_compute_done_FkuwuuqBpnEvzkd: orq %r8,%r8 je L$_after_reduction_FkuwuuqBpnEvzkd vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FkuwuuqBpnEvzkd: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_15_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_CBqhusrmEugbwks vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_CBqhusrmEugbwks L$_16_blocks_overflow_CBqhusrmEugbwks: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_CBqhusrmEugbwks: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 
.byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qEmtvwDozjnABmp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qEmtvwDozjnABmp L$_small_initial_partial_block_qEmtvwDozjnABmp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq 
$0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qEmtvwDozjnABmp: orq %r8,%r8 je L$_after_reduction_qEmtvwDozjnABmp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qEmtvwDozjnABmp: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_16_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_hDfCleGEdmpzBiw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_hDfCleGEdmpzBiw L$_16_blocks_overflow_hDfCleGEdmpzBiw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_hDfCleGEdmpzBiw: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 
vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_pybhdxzahdqcprl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pybhdxzahdqcprl: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pybhdxzahdqcprl: jmp L$_last_blocks_done_CGhwyqzCxCrDAod L$_last_num_blocks_is_0_CGhwyqzCxCrDAod: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CGhwyqzCxCrDAod: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_jzxBnczDBxGvzop L$_message_below_32_blocks_jzxBnczDBxGvzop: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_dzmCrsBiciGnliE vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq 
$0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_dzmCrsBiciGnliE: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_aFwwehusEvmmDke cmpl $8,%r10d je L$_last_num_blocks_is_8_aFwwehusEvmmDke jb L$_last_num_blocks_is_7_1_aFwwehusEvmmDke cmpl $12,%r10d je L$_last_num_blocks_is_12_aFwwehusEvmmDke jb L$_last_num_blocks_is_11_9_aFwwehusEvmmDke cmpl $15,%r10d je L$_last_num_blocks_is_15_aFwwehusEvmmDke ja L$_last_num_blocks_is_16_aFwwehusEvmmDke cmpl $14,%r10d je L$_last_num_blocks_is_14_aFwwehusEvmmDke jmp L$_last_num_blocks_is_13_aFwwehusEvmmDke L$_last_num_blocks_is_11_9_aFwwehusEvmmDke: cmpl $10,%r10d je L$_last_num_blocks_is_10_aFwwehusEvmmDke ja L$_last_num_blocks_is_11_aFwwehusEvmmDke jmp L$_last_num_blocks_is_9_aFwwehusEvmmDke L$_last_num_blocks_is_7_1_aFwwehusEvmmDke: cmpl $4,%r10d je L$_last_num_blocks_is_4_aFwwehusEvmmDke jb L$_last_num_blocks_is_3_1_aFwwehusEvmmDke cmpl $6,%r10d ja L$_last_num_blocks_is_7_aFwwehusEvmmDke je L$_last_num_blocks_is_6_aFwwehusEvmmDke jmp L$_last_num_blocks_is_5_aFwwehusEvmmDke L$_last_num_blocks_is_3_1_aFwwehusEvmmDke: cmpl $2,%r10d ja L$_last_num_blocks_is_3_aFwwehusEvmmDke je L$_last_num_blocks_is_2_aFwwehusEvmmDke L$_last_num_blocks_is_1_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_BsFiEfmuvxGEGuk vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_BsFiEfmuvxGEGuk L$_16_blocks_overflow_BsFiEfmuvxGEGuk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_BsFiEfmuvxGEGuk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 
.byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_zEujlpbgqDyCdvt subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zEujlpbgqDyCdvt L$_small_initial_partial_block_zEujlpbgqDyCdvt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_zEujlpbgqDyCdvt L$_small_initial_compute_done_zEujlpbgqDyCdvt: L$_after_reduction_zEujlpbgqDyCdvt: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_2_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_DrefbggoCuhFosm vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_DrefbggoCuhFosm L$_16_blocks_overflow_DrefbggoCuhFosm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_DrefbggoCuhFosm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 
$0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rjkFEDDDoeuwufs subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rjkFEDDDoeuwufs L$_small_initial_partial_block_rjkFEDDDoeuwufs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rjkFEDDDoeuwufs: orq %r8,%r8 je L$_after_reduction_rjkFEDDDoeuwufs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rjkFEDDDoeuwufs: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_3_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_oskEeEmCEGeqECv vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_oskEeEmCEGeqECv L$_16_blocks_overflow_oskEeEmCEGeqECv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_oskEeEmCEGeqECv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sgeerDwthydzyuy subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sgeerDwthydzyuy L$_small_initial_partial_block_sgeerDwthydzyuy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sgeerDwthydzyuy: orq %r8,%r8 je L$_after_reduction_sgeerDwthydzyuy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sgeerDwthydzyuy: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_4_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_aAxBGtfyfEadAkB vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_aAxBGtfyfEadAkB L$_16_blocks_overflow_aAxBGtfyfEadAkB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_aAxBGtfyfEadAkB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 
160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_Dqjcrneuragvwkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Dqjcrneuragvwkw L$_small_initial_partial_block_Dqjcrneuragvwkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Dqjcrneuragvwkw: orq %r8,%r8 je L$_after_reduction_Dqjcrneuragvwkw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Dqjcrneuragvwkw: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_5_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_bpEikxmsheidfwq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_bpEikxmsheidfwq L$_16_blocks_overflow_bpEikxmsheidfwq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_bpEikxmsheidfwq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 
896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AGuqEbsAbinbrDm subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AGuqEbsAbinbrDm L$_small_initial_partial_block_AGuqEbsAbinbrDm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq 
%zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AGuqEbsAbinbrDm: orq %r8,%r8 je L$_after_reduction_AGuqEbsAbinbrDm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AGuqEbsAbinbrDm: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_6_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_otEmDDixbpFEmvy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_otEmDDixbpFEmvy L$_16_blocks_overflow_otEmDDixbpFEmvy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_otEmDDixbpFEmvy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 
98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FAvepDmDsogujha subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FAvepDmDsogujha L$_small_initial_partial_block_FAvepDmDsogujha: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FAvepDmDsogujha: orq %r8,%r8 je L$_after_reduction_FAvepDmDsogujha vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FAvepDmDsogujha: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_7_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_kEvFawDBkeclidj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_kEvFawDBkeclidj L$_16_blocks_overflow_kEvFawDBkeclidj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_kEvFawDBkeclidj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 
vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jyvbjxevpurblup subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jyvbjxevpurblup 
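/* Inferred annotation (this is generated code; the description below is an
   assumption drawn from the surrounding labels and visible operands, not part
   of the original source): each L$_last_num_blocks_is_N_* arm ends with a
   "cmpq $16,%r8 / jl L$_small_initial_partial_block_*" guard, so the
   L$_small_initial_partial_block_* path that follows is taken when the final
   block is short. It appears to record the remaining byte count at (%rdx) and
   what looks like the last ciphertext block at 16(%rsi), then performs the
   same GHASH folding as the full-block path: the hand-encoded .byte sequences
   are EVEX vpclmulqdq (and vaesenc/vaesenclast elsewhere), and the trailing
   vpslldq/vpsrldq/vpternlogq group with the POLY2 constant reduces the
   accumulated product modulo the GCM polynomial into %xmm14. */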
L$_small_initial_partial_block_jyvbjxevpurblup: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jyvbjxevpurblup: orq %r8,%r8 je L$_after_reduction_jyvbjxevpurblup vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jyvbjxevpurblup: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_8_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_nfBegzmtymkjkuE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_nfBegzmtymkjkuE L$_16_blocks_overflow_nfBegzmtymkjkuE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_nfBegzmtymkjkuE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 
vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ArBbnussymieuyl subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ArBbnussymieuyl L$_small_initial_partial_block_ArBbnussymieuyl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ArBbnussymieuyl: orq %r8,%r8 je L$_after_reduction_ArBbnussymieuyl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ArBbnussymieuyl: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_9_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae 
L$_16_blocks_overflow_zjmfGFrkFzfxxez vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_zjmfGFrkFzfxxez L$_16_blocks_overflow_zjmfGFrkFzfxxez: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_zjmfGFrkFzfxxez: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CEuslsjdAFEouni subq $16,%r8 movl 
$0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CEuslsjdAFEouni L$_small_initial_partial_block_CEuslsjdAFEouni: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CEuslsjdAFEouni: orq %r8,%r8 je L$_after_reduction_CEuslsjdAFEouni vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CEuslsjdAFEouni: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_10_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_BvDkzdlGxbqBdwD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_BvDkzdlGxbqBdwD L$_16_blocks_overflow_BvDkzdlGxbqBdwD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_BvDkzdlGxbqBdwD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FEEAGeFDucwexEe subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FEEAGeFDucwexEe L$_small_initial_partial_block_FEEAGeFDucwexEe: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FEEAGeFDucwexEe: orq %r8,%r8 je L$_after_reduction_FEEAGeFDucwexEe vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FEEAGeFDucwexEe: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_11_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_wfjezxDvGpDnoFf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_wfjezxDvGpDnoFf L$_16_blocks_overflow_wfjezxDvGpDnoFf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_wfjezxDvGpDnoFf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 
vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qadlBuzdbwfpDef subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 
vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qadlBuzdbwfpDef L$_small_initial_partial_block_qadlBuzdbwfpDef: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qadlBuzdbwfpDef: orq %r8,%r8 je L$_after_reduction_qadlBuzdbwfpDef vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qadlBuzdbwfpDef: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_12_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_pbckDbEtDdqavpn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_pbckDbEtDdqavpn L$_16_blocks_overflow_pbckDbEtDdqavpn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_pbckDbEtDdqavpn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_osuccbBAbutpqse subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_osuccbBAbutpqse L$_small_initial_partial_block_osuccbBAbutpqse: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_osuccbBAbutpqse: orq %r8,%r8 je L$_after_reduction_osuccbBAbutpqse vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_osuccbBAbutpqse: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_13_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_oCotpBuspdAtjpe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_oCotpBuspdAtjpe L$_16_blocks_overflow_oCotpBuspdAtjpe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_oCotpBuspdAtjpe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 
64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mmuEdDpgoEjulrs subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mmuEdDpgoEjulrs L$_small_initial_partial_block_mmuEdDpgoEjulrs: movl %r8d,(%rdx) vmovdqu64 
%xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mmuEdDpgoEjulrs: orq %r8,%r8 je L$_after_reduction_mmuEdDpgoEjulrs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mmuEdDpgoEjulrs: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_14_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_bbvjuqrsjgdyCBn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_bbvjuqrsjgdyCBn L$_16_blocks_overflow_bbvjuqrsjgdyCBn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_bbvjuqrsjgdyCBn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq 
$0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_roGxlxzlgsulhzk subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 
.byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_roGxlxzlgsulhzk L$_small_initial_partial_block_roGxlxzlgsulhzk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_roGxlxzlgsulhzk: orq %r8,%r8 je L$_after_reduction_roGxlxzlgsulhzk vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_roGxlxzlgsulhzk: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_15_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_GriwFAotfyoEekC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_GriwFAotfyoEekC L$_16_blocks_overflow_GriwFAotfyoEekC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_GriwFAotfyoEekC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_huBogwgwhfClyls subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 
vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_huBogwgwhfClyls L$_small_initial_partial_block_huBogwgwhfClyls: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_huBogwgwhfClyls: orq %r8,%r8 je L$_after_reduction_huBogwgwhfClyls vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_huBogwgwhfClyls: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_16_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_sjAcjwAAtCgmwjr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_sjAcjwAAtCgmwjr L$_16_blocks_overflow_sjAcjwAAtCgmwjr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_sjAcjwAAtCgmwjr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_ayefrejzGqbkfya: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq 
$0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ayefrejzGqbkfya: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ayefrejzGqbkfya: jmp L$_last_blocks_done_aFwwehusEvmmDke L$_last_num_blocks_is_0_aFwwehusEvmmDke: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_aFwwehusEvmmDke: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_jzxBnczDBxGvzop L$_message_below_equal_16_blocks_jzxBnczDBxGvzop: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_wjgmgrFcljfrexe jl L$_small_initial_num_blocks_is_7_1_wjgmgrFcljfrexe cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_wjgmgrFcljfrexe jl L$_small_initial_num_blocks_is_11_9_wjgmgrFcljfrexe cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_wjgmgrFcljfrexe cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_wjgmgrFcljfrexe cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_wjgmgrFcljfrexe jmp L$_small_initial_num_blocks_is_13_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_11_9_wjgmgrFcljfrexe: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_wjgmgrFcljfrexe cmpq $10,%r12 je 
L$_small_initial_num_blocks_is_10_wjgmgrFcljfrexe jmp L$_small_initial_num_blocks_is_9_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_7_1_wjgmgrFcljfrexe: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_wjgmgrFcljfrexe jl L$_small_initial_num_blocks_is_3_1_wjgmgrFcljfrexe cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_wjgmgrFcljfrexe cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_wjgmgrFcljfrexe jmp L$_small_initial_num_blocks_is_5_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_3_1_wjgmgrFcljfrexe: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_wjgmgrFcljfrexe cmpq $2,%r12 je L$_small_initial_num_blocks_is_2_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_1_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_FfndtjjjGEeCFEr subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FfndtjjjGEeCFEr L$_small_initial_partial_block_FfndtjjjGEeCFEr: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_FfndtjjjGEeCFEr L$_small_initial_compute_done_FfndtjjjGEeCFEr: L$_after_reduction_FfndtjjjGEeCFEr: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_2_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 
32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EnhukCdygAFrqou subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EnhukCdygAFrqou L$_small_initial_partial_block_EnhukCdygAFrqou: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EnhukCdygAFrqou: orq %r8,%r8 je L$_after_reduction_EnhukCdygAFrqou vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_EnhukCdygAFrqou: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_3_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 
96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_impdlEsbGuAaott subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_impdlEsbGuAaott L$_small_initial_partial_block_impdlEsbGuAaott: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_impdlEsbGuAaott: orq %r8,%r8 je L$_after_reduction_impdlEsbGuAaott vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_impdlEsbGuAaott: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_4_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 
98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rBqdjBpBxxfxpoF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rBqdjBpBxxfxpoF L$_small_initial_partial_block_rBqdjBpBxxfxpoF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rBqdjBpBxxfxpoF: orq %r8,%r8 je L$_after_reduction_rBqdjBpBxxfxpoF vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_rBqdjBpBxxfxpoF: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_5_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vuaskFEqawsiCsj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vuaskFEqawsiCsj L$_small_initial_partial_block_vuaskFEqawsiCsj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vuaskFEqawsiCsj: orq %r8,%r8 je L$_after_reduction_vuaskFEqawsiCsj vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_vuaskFEqawsiCsj: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_6_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dCffBvEqzkjcfvA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dCffBvEqzkjcfvA L$_small_initial_partial_block_dCffBvEqzkjcfvA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dCffBvEqzkjcfvA: orq %r8,%r8 je L$_after_reduction_dCffBvEqzkjcfvA vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_dCffBvEqzkjcfvA: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_7_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yEllnEiichfbFDc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yEllnEiichfbFDc L$_small_initial_partial_block_yEllnEiichfbFDc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 
98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yEllnEiichfbFDc: orq %r8,%r8 je L$_after_reduction_yEllnEiichfbFDc vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_yEllnEiichfbFDc: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_8_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vBdxtBrlzxbaFcc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 
vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vBdxtBrlzxbaFcc L$_small_initial_partial_block_vBdxtBrlzxbaFcc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vBdxtBrlzxbaFcc: orq %r8,%r8 je L$_after_reduction_vBdxtBrlzxbaFcc vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_vBdxtBrlzxbaFcc: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_9_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_scfvxdenebqCdyz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_scfvxdenebqCdyz L$_small_initial_partial_block_scfvxdenebqCdyz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_scfvxdenebqCdyz: orq %r8,%r8 je L$_after_reduction_scfvxdenebqCdyz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_scfvxdenebqCdyz: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_10_wjgmgrFcljfrexe: 
vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cvcjsgotzqiyevA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cvcjsgotzqiyevA L$_small_initial_partial_block_cvcjsgotzqiyevA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cvcjsgotzqiyevA: orq %r8,%r8 je L$_after_reduction_cvcjsgotzqiyevA vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_cvcjsgotzqiyevA: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_11_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 
.byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vuCaGGnzBCpphtu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vuCaGGnzBCpphtu L$_small_initial_partial_block_vuCaGGnzBCpphtu: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vuCaGGnzBCpphtu: orq %r8,%r8 je L$_after_reduction_vuCaGGnzBCpphtu vpxorq 
%xmm13,%xmm14,%xmm14 L$_after_reduction_vuCaGGnzBCpphtu: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_12_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qhFhudxmstaFEvA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 
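# Descriptive note (inferred from the surrounding instructions): the extracts/XORs above
# fold the 512-bit GHASH accumulators down to 128 bits; the POLY2 constant loaded next
# drives the final reduction, with vpclmulqdq hand-encoded as the .byte 98,... sequences.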
vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qhFhudxmstaFEvA L$_small_initial_partial_block_qhFhudxmstaFEvA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qhFhudxmstaFEvA: orq %r8,%r8 je L$_after_reduction_qhFhudxmstaFEvA vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_qhFhudxmstaFEvA: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_13_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 
112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GbEgefaoCcDkbpn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GbEgefaoCcDkbpn L$_small_initial_partial_block_GbEgefaoCcDkbpn: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq 
$0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GbEgefaoCcDkbpn: orq %r8,%r8 je L$_after_reduction_GbEgefaoCcDkbpn vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_GbEgefaoCcDkbpn: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_14_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) 
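# Note: ciphertext is written out below; the last 32-byte chunk is stored through mask k1
# (built earlier from byte64_len_to_mask_table) so a partial final block is not overwritten.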
vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hCaaAkupwhFdkkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hCaaAkupwhFdkkk L$_small_initial_partial_block_hCaaAkupwhFdkkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hCaaAkupwhFdkkk: orq %r8,%r8 je L$_after_reduction_hCaaAkupwhFdkkk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_hCaaAkupwhFdkkk: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe 
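# 15-block variant of the small-initial-blocks dispatch (structure inferred from the code):
# build counter blocks from ddq_add_1234/ddq_add_5678/ddq_add_8888, byte-swap them with
# SHUF_MASK, run them through the round keys at (%rdi), XOR with the (k1-masked) input,
# and fold the byte-reflected ciphertext into the GHASH accumulator in %xmm14 using the
# hash-key powers stored from 80(%rsi).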
L$_small_initial_num_blocks_is_15_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kBjkymsezzduvxc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 
98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kBjkymsezzduvxc L$_small_initial_partial_block_kBjkymsezzduvxc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kBjkymsezzduvxc: orq %r8,%r8 je L$_after_reduction_kBjkymsezzduvxc vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_kBjkymsezzduvxc: jmp L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe L$_small_initial_num_blocks_is_16_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 
16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_qxCpdapFxyCuqwj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qxCpdapFxyCuqwj: vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_qxCpdapFxyCuqwj: L$_small_initial_blocks_encrypted_wjgmgrFcljfrexe: L$_ghash_done_jzxBnczDBxGvzop: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_jzxBnczDBxGvzop: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_jzxBnczDBxGvzop: jmp L$exit_gcm_encrypt .p2align 5 L$aes_gcm_encrypt_256_avx512: orq %r8,%r8 je L$_enc_dec_abort_ralurfzeatcGxDF xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_yhoCcfnksexDFbx movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_yhoCcfnksexDFbx subq %r13,%r12 L$_no_extra_mask_yhoCcfnksexDFbx: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_yhoCcfnksexDFbx .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_yhoCcfnksexDFbx L$_partial_incomplete_yhoCcfnksexDFbx: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_yhoCcfnksexDFbx: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_yhoCcfnksexDFbx: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_ralurfzeatcGxDF cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_ralurfzeatcGxDF vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_FolitFcvmzDtzbD vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_FolitFcvmzDtzbD L$_next_16_overflow_FolitFcvmzDtzbD: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_FolitFcvmzDtzbD: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 
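# AES-256 CTR path: round key 0 was XORed into the four counter groups above; the
# .byte 98,... sequences below are EVEX-encoded vaesenc rounds for the keys at
# 16..208(%rdi), finished by vaesenclast with the key at 224(%rdi).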
vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_slhsqgEufGclFec vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_slhsqgEufGclFec: cmpq $512,%r8 jb L$_message_below_32_blocks_ralurfzeatcGxDF cmpb $240,%r15b jae L$_next_16_overflow_rpkeAoplfcmnoqe vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_rpkeAoplfcmnoqe L$_next_16_overflow_rpkeAoplfcmnoqe: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_rpkeAoplfcmnoqe: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 
384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_wDdhvEhGipECfzn vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 
POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_wDdhvEhGipECfzn: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_ralurfzeatcGxDF L$_encrypt_big_nblocks_ralurfzeatcGxDF: cmpb $240,%r15b jae L$_16_blocks_overflow_tcpaCgCtyttEnkC vpaddd 
%zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_tcpaCgCtyttEnkC L$_16_blocks_overflow_tcpaCgCtyttEnkC: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_tcpaCgCtyttEnkC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_fefwzzFqtyGgFsy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_fefwzzFqtyGgFsy L$_16_blocks_overflow_fefwzzFqtyGgFsy: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_fefwzzFqtyGgFsy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_poCaishDCqiAtDd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_poCaishDCqiAtDd L$_16_blocks_overflow_poCaishDCqiAtDd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_poCaishDCqiAtDd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 
640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_ralurfzeatcGxDF L$_no_more_big_nblocks_ralurfzeatcGxDF: cmpq $512,%r8 jae L$_encrypt_32_blocks_ralurfzeatcGxDF cmpq $256,%r8 jae L$_encrypt_16_blocks_ralurfzeatcGxDF L$_encrypt_0_blocks_ghash_32_ralurfzeatcGxDF: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq 
$0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_xdvkfswEyEirzwB cmpl $8,%r10d je L$_last_num_blocks_is_8_xdvkfswEyEirzwB jb L$_last_num_blocks_is_7_1_xdvkfswEyEirzwB cmpl $12,%r10d je L$_last_num_blocks_is_12_xdvkfswEyEirzwB jb L$_last_num_blocks_is_11_9_xdvkfswEyEirzwB cmpl $15,%r10d je L$_last_num_blocks_is_15_xdvkfswEyEirzwB ja L$_last_num_blocks_is_16_xdvkfswEyEirzwB cmpl $14,%r10d je L$_last_num_blocks_is_14_xdvkfswEyEirzwB jmp L$_last_num_blocks_is_13_xdvkfswEyEirzwB L$_last_num_blocks_is_11_9_xdvkfswEyEirzwB: cmpl $10,%r10d je L$_last_num_blocks_is_10_xdvkfswEyEirzwB ja L$_last_num_blocks_is_11_xdvkfswEyEirzwB jmp L$_last_num_blocks_is_9_xdvkfswEyEirzwB L$_last_num_blocks_is_7_1_xdvkfswEyEirzwB: cmpl $4,%r10d je L$_last_num_blocks_is_4_xdvkfswEyEirzwB jb L$_last_num_blocks_is_3_1_xdvkfswEyEirzwB cmpl $6,%r10d ja L$_last_num_blocks_is_7_xdvkfswEyEirzwB je L$_last_num_blocks_is_6_xdvkfswEyEirzwB jmp L$_last_num_blocks_is_5_xdvkfswEyEirzwB L$_last_num_blocks_is_3_1_xdvkfswEyEirzwB: cmpl $2,%r10d ja L$_last_num_blocks_is_3_xdvkfswEyEirzwB je L$_last_num_blocks_is_2_xdvkfswEyEirzwB L$_last_num_blocks_is_1_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_igvodhikativhxs vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_igvodhikativhxs L$_16_blocks_overflow_igvodhikativhxs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_igvodhikativhxs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq 
$0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_GcsipbkriaBjvfi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GcsipbkriaBjvfi L$_small_initial_partial_block_GcsipbkriaBjvfi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_GcsipbkriaBjvfi L$_small_initial_compute_done_GcsipbkriaBjvfi: L$_after_reduction_GcsipbkriaBjvfi: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_2_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_vsprwaoekjwbkng vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_vsprwaoekjwbkng L$_16_blocks_overflow_vsprwaoekjwbkng: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_vsprwaoekjwbkng: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lhbsspkwfiDtCyr subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lhbsspkwfiDtCyr L$_small_initial_partial_block_lhbsspkwfiDtCyr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lhbsspkwfiDtCyr: orq %r8,%r8 
je L$_after_reduction_lhbsspkwfiDtCyr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lhbsspkwfiDtCyr: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_3_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_pdiFfjCElAtekEv vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_pdiFfjCElAtekEv L$_16_blocks_overflow_pdiFfjCElAtekEv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_pdiFfjCElAtekEv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iyftGziCGvzBGwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iyftGziCGvzBGwp L$_small_initial_partial_block_iyftGziCGvzBGwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iyftGziCGvzBGwp: orq %r8,%r8 je L$_after_reduction_iyftGziCGvzBGwp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iyftGziCGvzBGwp: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_4_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_giftEyoltvfgggA vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_giftEyoltvfgggA L$_16_blocks_overflow_giftEyoltvfgggA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_giftEyoltvfgggA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 
192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hdzCnewjxBbishd subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hdzCnewjxBbishd L$_small_initial_partial_block_hdzCnewjxBbishd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hdzCnewjxBbishd: orq %r8,%r8 je L$_after_reduction_hdzCnewjxBbishd vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hdzCnewjxBbishd: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_5_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_orpkewzlnxCGshz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_orpkewzlnxCGshz L$_16_blocks_overflow_orpkewzlnxCGshz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_orpkewzlnxCGshz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 
1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kyaoueFfnBudEhA subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kyaoueFfnBudEhA L$_small_initial_partial_block_kyaoueFfnBudEhA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 
98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kyaoueFfnBudEhA: orq %r8,%r8 je L$_after_reduction_kyaoueFfnBudEhA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kyaoueFfnBudEhA: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_6_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_orictFjAdfigdzk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_orictFjAdfigdzk L$_16_blocks_overflow_orictFjAdfigdzk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_orictFjAdfigdzk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 
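# Remaining AES rounds for this 6-block tail: keys 176..224(%rdi) are broadcast and
# applied to the full 512-bit lane (%zmm0, four blocks) and the 256-bit lane (%ymm3,
# two blocks); the interleaved .byte groups appear to be hand-encoded EVEX
# vaesenc/vaesenclast forms, matching the pattern used throughout this routine.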
vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sgBbGfbjnccbnkh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sgBbGfbjnccbnkh L$_small_initial_partial_block_sgBbGfbjnccbnkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sgBbGfbjnccbnkh: orq %r8,%r8 je L$_after_reduction_sgBbGfbjnccbnkh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sgBbGfbjnccbnkh: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_7_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_ivtabDnDqnrGEcy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_ivtabDnDqnrGEcy L$_16_blocks_overflow_ivtabDnDqnrGEcy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_ivtabDnDqnrGEcy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FwberbenvBxEcDE subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FwberbenvBxEcDE L$_small_initial_partial_block_FwberbenvBxEcDE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FwberbenvBxEcDE: orq %r8,%r8 je L$_after_reduction_FwberbenvBxEcDE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FwberbenvBxEcDE: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_8_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_uBiojDdgtEoAfGd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_uBiojDdgtEoAfGd L$_16_blocks_overflow_uBiojDdgtEoAfGd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_uBiojDdgtEoAfGd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 
vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_twDrbrvhowngEDr subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_twDrbrvhowngEDr L$_small_initial_partial_block_twDrbrvhowngEDr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_twDrbrvhowngEDr: orq %r8,%r8 je L$_after_reduction_twDrbrvhowngEDr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_twDrbrvhowngEDr: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_9_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_FqperxgfhBwCqDo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_FqperxgfhBwCqDo L$_16_blocks_overflow_FqperxgfhBwCqDo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_FqperxgfhBwCqDo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_agoyuAkiGwzDjns subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_agoyuAkiGwzDjns L$_small_initial_partial_block_agoyuAkiGwzDjns: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
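# The carry-less multiplies and POLY2-based reduction above fold the accumulated
# products into %xmm14, which appears to hold the running GHASH value for this
# 9-block tail before the partial-block adjustment below.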
L$_small_initial_compute_done_agoyuAkiGwzDjns: orq %r8,%r8 je L$_after_reduction_agoyuAkiGwzDjns vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_agoyuAkiGwzDjns: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_10_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_bvimoanuboioxom vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_bvimoanuboioxom L$_16_blocks_overflow_bvimoanuboioxom: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_bvimoanuboioxom: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kblafGutCsvisjA subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kblafGutCsvisjA L$_small_initial_partial_block_kblafGutCsvisjA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kblafGutCsvisjA: orq %r8,%r8 je L$_after_reduction_kblafGutCsvisjA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kblafGutCsvisjA: jmp L$_last_blocks_done_xdvkfswEyEirzwB 
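# Tail handler for a final group of 11 counter blocks: two full 512-bit counter lanes
# plus a masked third lane (mask %k1 derived from byte64_len_to_mask_table), counter-mode
# encryption with round keys broadcast from 0..224(%rdi), and GHASH over the resulting
# ciphertext using what appear to be precomputed hash-key powers near 80(%rsi), reduced
# with the POLY2 constant.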
L$_last_num_blocks_is_11_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_DcdigDqdkAmpala vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_DcdigDqdkAmpala L$_16_blocks_overflow_DcdigDqdkAmpala: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_DcdigDqdkAmpala: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq 
%zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lFojEFrDvGhrqGC subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lFojEFrDvGhrqGC L$_small_initial_partial_block_lFojEFrDvGhrqGC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lFojEFrDvGhrqGC: orq %r8,%r8 je L$_after_reduction_lFojEFrDvGhrqGC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lFojEFrDvGhrqGC: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_12_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_ijmafkyicqbAgov vpaddd 
%zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_ijmafkyicqbAgov L$_16_blocks_overflow_ijmafkyicqbAgov: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_ijmafkyicqbAgov: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb 
%zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lbhyvEuvxtzgCqA subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lbhyvEuvxtzgCqA L$_small_initial_partial_block_lbhyvEuvxtzgCqA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lbhyvEuvxtzgCqA: orq %r8,%r8 je L$_after_reduction_lbhyvEuvxtzgCqA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lbhyvEuvxtzgCqA: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_13_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_xewjdgAADiucjCd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_xewjdgAADiucjCd L$_16_blocks_overflow_xewjdgAADiucjCd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_xewjdgAADiucjCd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 
98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ihgbCttclcmDtmF subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ihgbCttclcmDtmF L$_small_initial_partial_block_ihgbCttclcmDtmF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ihgbCttclcmDtmF: orq %r8,%r8 je 
L$_after_reduction_ihgbCttclcmDtmF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ihgbCttclcmDtmF: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_14_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_uxvkthhndspgdct vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_uxvkthhndspgdct L$_16_blocks_overflow_uxvkthhndspgdct: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_uxvkthhndspgdct: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_okkABmocyzkldgz subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_okkABmocyzkldgz L$_small_initial_partial_block_okkABmocyzkldgz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 
vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_okkABmocyzkldgz: orq %r8,%r8 je L$_after_reduction_okkABmocyzkldgz vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_okkABmocyzkldgz: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_15_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_fdeajBtuhuyobdz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_fdeajBtuhuyobdz L$_16_blocks_overflow_fdeajBtuhuyobdz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_fdeajBtuhuyobdz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ltEnnExvFfBwyxa subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ltEnnExvFfBwyxa L$_small_initial_partial_block_ltEnnExvFfBwyxa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 
32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ltEnnExvFfBwyxa: orq %r8,%r8 je L$_after_reduction_ltEnnExvFfBwyxa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ltEnnExvFfBwyxa: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_16_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_mxnyyrjuxpBhloh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_mxnyyrjuxpBhloh L$_16_blocks_overflow_mxnyyrjuxpBhloh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_mxnyyrjuxpBhloh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_CFywctAlrBmkufB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq 
$0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CFywctAlrBmkufB: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CFywctAlrBmkufB: jmp L$_last_blocks_done_xdvkfswEyEirzwB L$_last_num_blocks_is_0_xdvkfswEyEirzwB: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_xdvkfswEyEirzwB: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_ralurfzeatcGxDF L$_encrypt_32_blocks_ralurfzeatcGxDF: cmpb $240,%r15b jae L$_16_blocks_overflow_maxnEmGesnybyGw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_maxnEmGesnybyGw L$_16_blocks_overflow_maxnEmGesnybyGw: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_maxnEmGesnybyGw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 
128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_caDkotybClbwqcs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_caDkotybClbwqcs L$_16_blocks_overflow_caDkotybClbwqcs: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_caDkotybClbwqcs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 
vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CpuqkplkrGqAlEE cmpl $8,%r10d je L$_last_num_blocks_is_8_CpuqkplkrGqAlEE jb L$_last_num_blocks_is_7_1_CpuqkplkrGqAlEE cmpl $12,%r10d je L$_last_num_blocks_is_12_CpuqkplkrGqAlEE jb L$_last_num_blocks_is_11_9_CpuqkplkrGqAlEE cmpl $15,%r10d je L$_last_num_blocks_is_15_CpuqkplkrGqAlEE ja L$_last_num_blocks_is_16_CpuqkplkrGqAlEE cmpl $14,%r10d je L$_last_num_blocks_is_14_CpuqkplkrGqAlEE jmp L$_last_num_blocks_is_13_CpuqkplkrGqAlEE L$_last_num_blocks_is_11_9_CpuqkplkrGqAlEE: cmpl $10,%r10d je L$_last_num_blocks_is_10_CpuqkplkrGqAlEE ja L$_last_num_blocks_is_11_CpuqkplkrGqAlEE jmp L$_last_num_blocks_is_9_CpuqkplkrGqAlEE L$_last_num_blocks_is_7_1_CpuqkplkrGqAlEE: cmpl $4,%r10d je L$_last_num_blocks_is_4_CpuqkplkrGqAlEE jb L$_last_num_blocks_is_3_1_CpuqkplkrGqAlEE cmpl $6,%r10d ja L$_last_num_blocks_is_7_CpuqkplkrGqAlEE je L$_last_num_blocks_is_6_CpuqkplkrGqAlEE jmp L$_last_num_blocks_is_5_CpuqkplkrGqAlEE L$_last_num_blocks_is_3_1_CpuqkplkrGqAlEE: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CpuqkplkrGqAlEE je L$_last_num_blocks_is_2_CpuqkplkrGqAlEE L$_last_num_blocks_is_1_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_alDzwCfDlrwfuue vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_alDzwCfDlrwfuue L$_16_blocks_overflow_alDzwCfDlrwfuue: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb 
%xmm29,%xmm0,%xmm0 L$_16_blocks_ok_alDzwCfDlrwfuue: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_iBlFpkcubprtgpj subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iBlFpkcubprtgpj L$_small_initial_partial_block_iBlFpkcubprtgpj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 
$1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_iBlFpkcubprtgpj L$_small_initial_compute_done_iBlFpkcubprtgpj: L$_after_reduction_iBlFpkcubprtgpj: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_2_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_nCqcfaumojsjgbp vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_nCqcfaumojsjgbp L$_16_blocks_overflow_nCqcfaumojsjgbp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_nCqcfaumojsjgbp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yceinkEjzFdqAeG subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 
vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yceinkEjzFdqAeG L$_small_initial_partial_block_yceinkEjzFdqAeG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yceinkEjzFdqAeG: orq %r8,%r8 je L$_after_reduction_yceinkEjzFdqAeG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yceinkEjzFdqAeG: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_3_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_uwpbmorybawstbl vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_uwpbmorybawstbl L$_16_blocks_overflow_uwpbmorybawstbl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_uwpbmorybawstbl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 
98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sCduuopFvdCBjgG subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sCduuopFvdCBjgG L$_small_initial_partial_block_sCduuopFvdCBjgG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sCduuopFvdCBjgG: orq %r8,%r8 je L$_after_reduction_sCduuopFvdCBjgG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sCduuopFvdCBjgG: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_4_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_vadkquwycFnaotd vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_vadkquwycFnaotd L$_16_blocks_overflow_vadkquwycFnaotd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_vadkquwycFnaotd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 
98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ivhaorpFqBawvwj subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ivhaorpFqBawvwj L$_small_initial_partial_block_ivhaorpFqBawvwj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ivhaorpFqBawvwj: orq %r8,%r8 je L$_after_reduction_ivhaorpFqBawvwj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ivhaorpFqBawvwj: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_5_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_aFkFaFcofvloukl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_aFkFaFcofvloukl L$_16_blocks_overflow_aFkFaFcofvloukl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_aFkFaFcofvloukl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 
$0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DnveyAaCeDgzdCr subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DnveyAaCeDgzdCr L$_small_initial_partial_block_DnveyAaCeDgzdCr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DnveyAaCeDgzdCr: orq %r8,%r8 je L$_after_reduction_DnveyAaCeDgzdCr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DnveyAaCeDgzdCr: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_6_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_hyGBuzayqDhhsut vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_hyGBuzayqDhhsut L$_16_blocks_overflow_hyGBuzayqDhhsut: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_hyGBuzayqDhhsut: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 
vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FildbillAFDaont subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FildbillAFDaont L$_small_initial_partial_block_FildbillAFDaont: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 
98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FildbillAFDaont: orq %r8,%r8 je L$_after_reduction_FildbillAFDaont vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FildbillAFDaont: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_7_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_wfwrxhyCBsGqfaa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_wfwrxhyCBsGqfaa L$_16_blocks_overflow_wfwrxhyCBsGqfaa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_wfwrxhyCBsGqfaa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 
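/* The .byte sequences in these handlers are hand-encoded EVEX instructions:
   opcode 0xdc/0xdd (map 0F38) is vaesenc/vaesenclast and opcode 0x44
   (map 0F3A) is vpclmulqdq, in their xmm/ymm/zmm forms.  They are most
   likely emitted as raw bytes to support assemblers without VAES and
   VPCLMULQDQ encodings. */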
.byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dFDhkscmwibqAtn subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dFDhkscmwibqAtn L$_small_initial_partial_block_dFDhkscmwibqAtn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dFDhkscmwibqAtn: orq %r8,%r8 je L$_after_reduction_dFDhkscmwibqAtn vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dFDhkscmwibqAtn: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_8_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_nwCspduhyDCpabc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_nwCspduhyDCpabc L$_16_blocks_overflow_nwCspduhyDCpabc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_nwCspduhyDCpabc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_atEEroEqtkbEDxn subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq 
$8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_atEEroEqtkbEDxn L$_small_initial_partial_block_atEEroEqtkbEDxn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_atEEroEqtkbEDxn: orq %r8,%r8 je L$_after_reduction_atEEroEqtkbEDxn vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_atEEroEqtkbEDxn: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_9_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_FtfeaayDywckyfd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_FtfeaayDywckyfd L$_16_blocks_overflow_FtfeaayDywckyfd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_FtfeaayDywckyfd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 
.byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nvbkpkefGjFjFfs subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 
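/* vpternlogq with immediate 0x96 is a three-way XOR; the next instruction
   folds the reduced product into %xmm14, which appears to carry the running
   GHASH value across these tail handlers. */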
vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nvbkpkefGjFjFfs L$_small_initial_partial_block_nvbkpkefGjFjFfs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nvbkpkefGjFjFfs: orq %r8,%r8 je L$_after_reduction_nvbkpkefGjFjFfs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nvbkpkefGjFjFfs: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_10_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_rwkpzgCdusgbwpC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_rwkpzgCdusgbwpC L$_16_blocks_overflow_rwkpzgCdusgbwpC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_rwkpzgCdusgbwpC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tEmDckpEuqBsraf subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tEmDckpEuqBsraf L$_small_initial_partial_block_tEmDckpEuqBsraf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq 
%zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tEmDckpEuqBsraf: orq %r8,%r8 je L$_after_reduction_tEmDckpEuqBsraf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tEmDckpEuqBsraf: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_11_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_lwGByppsljaznxt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_lwGByppsljaznxt L$_16_blocks_overflow_lwGByppsljaznxt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_lwGByppsljaznxt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ilixxtsukzdoAtA subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ilixxtsukzdoAtA L$_small_initial_partial_block_ilixxtsukzdoAtA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 
98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ilixxtsukzdoAtA: orq %r8,%r8 je L$_after_reduction_ilixxtsukzdoAtA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ilixxtsukzdoAtA: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_12_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_jbqznyehrlCBlqk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_jbqznyehrlCBlqk L$_16_blocks_overflow_jbqznyehrlCBlqk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_jbqznyehrlCBlqk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq 
%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wctpdEkyEmpBhlB subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wctpdEkyEmpBhlB L$_small_initial_partial_block_wctpdEkyEmpBhlB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 
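/* Each L$_last_num_blocks_is_N_* handler follows the same template: build
   the counter blocks (ddq_add_1234 / ddq_add_4444, byte-swapped via the
   shuffle mask in %zmm29 on counter overflow), run the AES rounds with the
   hand-encoded vaesenc sequence, mask the partial tail load/store through
   byte64_len_to_mask_table, then multiply the byte-reflected ciphertext by
   precomputed hash-key powers and reduce with the POLY2 constants. */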
vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wctpdEkyEmpBhlB: orq %r8,%r8 je L$_after_reduction_wctpdEkyEmpBhlB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wctpdEkyEmpBhlB: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_13_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_zfoiakgFjhncFgz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_zfoiakgFjhncFgz L$_16_blocks_overflow_zfoiakgFjhncFgz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_zfoiakgFjhncFgz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq 
%zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ECBllyApvBoFquD subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ECBllyApvBoFquD L$_small_initial_partial_block_ECBllyApvBoFquD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ECBllyApvBoFquD: orq %r8,%r8 je L$_after_reduction_ECBllyApvBoFquD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ECBllyApvBoFquD: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_14_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_boaouDrBeEmAnwp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_boaouDrBeEmAnwp L$_16_blocks_overflow_boaouDrBeEmAnwp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_boaouDrBeEmAnwp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CjBwxsGswEoCtpA subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CjBwxsGswEoCtpA L$_small_initial_partial_block_CjBwxsGswEoCtpA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CjBwxsGswEoCtpA: orq %r8,%r8 je L$_after_reduction_CjBwxsGswEoCtpA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CjBwxsGswEoCtpA: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_15_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_mFdcfdxbaoeAcmw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_mFdcfdxbaoeAcmw L$_16_blocks_overflow_mFdcfdxbaoeAcmw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_mFdcfdxbaoeAcmw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 
.byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nkpoxiswyhgqlsf subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nkpoxiswyhgqlsf L$_small_initial_partial_block_nkpoxiswyhgqlsf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nkpoxiswyhgqlsf: orq %r8,%r8 je L$_after_reduction_nkpoxiswyhgqlsf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nkpoxiswyhgqlsf: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_16_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_dhDlEwplftmrFtf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_dhDlEwplftmrFtf L$_16_blocks_overflow_dhDlEwplftmrFtf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_dhDlEwplftmrFtf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 
80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_iuDhkykBcvvzBFb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iuDhkykBcvvzBFb: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iuDhkykBcvvzBFb: jmp L$_last_blocks_done_CpuqkplkrGqAlEE L$_last_num_blocks_is_0_CpuqkplkrGqAlEE: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CpuqkplkrGqAlEE: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_ralurfzeatcGxDF L$_encrypt_16_blocks_ralurfzeatcGxDF: cmpb $240,%r15b jae L$_16_blocks_overflow_FGbkcFatDxoofCE 
vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_FGbkcFatDxoofCE L$_16_blocks_overflow_FGbkcFatDxoofCE: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_FGbkcFatDxoofCE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_FesvdmtDyerGEdv cmpl $8,%r10d je L$_last_num_blocks_is_8_FesvdmtDyerGEdv jb L$_last_num_blocks_is_7_1_FesvdmtDyerGEdv cmpl $12,%r10d je L$_last_num_blocks_is_12_FesvdmtDyerGEdv jb L$_last_num_blocks_is_11_9_FesvdmtDyerGEdv cmpl $15,%r10d je L$_last_num_blocks_is_15_FesvdmtDyerGEdv ja L$_last_num_blocks_is_16_FesvdmtDyerGEdv cmpl $14,%r10d je L$_last_num_blocks_is_14_FesvdmtDyerGEdv jmp L$_last_num_blocks_is_13_FesvdmtDyerGEdv L$_last_num_blocks_is_11_9_FesvdmtDyerGEdv: cmpl $10,%r10d je L$_last_num_blocks_is_10_FesvdmtDyerGEdv ja L$_last_num_blocks_is_11_FesvdmtDyerGEdv jmp L$_last_num_blocks_is_9_FesvdmtDyerGEdv L$_last_num_blocks_is_7_1_FesvdmtDyerGEdv: cmpl $4,%r10d je L$_last_num_blocks_is_4_FesvdmtDyerGEdv jb L$_last_num_blocks_is_3_1_FesvdmtDyerGEdv cmpl $6,%r10d ja L$_last_num_blocks_is_7_FesvdmtDyerGEdv je L$_last_num_blocks_is_6_FesvdmtDyerGEdv jmp L$_last_num_blocks_is_5_FesvdmtDyerGEdv L$_last_num_blocks_is_3_1_FesvdmtDyerGEdv: cmpl $2,%r10d ja L$_last_num_blocks_is_3_FesvdmtDyerGEdv je L$_last_num_blocks_is_2_FesvdmtDyerGEdv L$_last_num_blocks_is_1_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_cmjbanhfxFrrojy vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_cmjbanhfxFrrojy L$_16_blocks_overflow_cmjbanhfxFrrojy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_cmjbanhfxFrrojy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 
98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_EGeAwrlgtsiFljf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EGeAwrlgtsiFljf L$_small_initial_partial_block_EGeAwrlgtsiFljf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_EGeAwrlgtsiFljf L$_small_initial_compute_done_EGeAwrlgtsiFljf: L$_after_reduction_EGeAwrlgtsiFljf: jmp 
L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_2_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_EgjyoropybwcGcn vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_EgjyoropybwcGcn L$_16_blocks_overflow_EgjyoropybwcGcn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_EgjyoropybwcGcn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rrppsiDyiwwbqbf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rrppsiDyiwwbqbf L$_small_initial_partial_block_rrppsiDyiwwbqbf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rrppsiDyiwwbqbf: orq %r8,%r8 je L$_after_reduction_rrppsiDyiwwbqbf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rrppsiDyiwwbqbf: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_3_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_wGGmGvscmpGfnny vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_wGGmGvscmpGfnny L$_16_blocks_overflow_wGGmGvscmpGfnny: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_wGGmGvscmpGfnny: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq 
$0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pFvDrkCwqwAamnn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pFvDrkCwqwAamnn L$_small_initial_partial_block_pFvDrkCwqwAamnn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pFvDrkCwqwAamnn: orq %r8,%r8 je L$_after_reduction_pFvDrkCwqwAamnn vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pFvDrkCwqwAamnn: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_4_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_lnowafuogaacgct vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_lnowafuogaacgct 
L$_16_blocks_overflow_lnowafuogaacgct: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_lnowafuogaacgct: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yqgqaEocfqiFkDi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yqgqaEocfqiFkDi L$_small_initial_partial_block_yqgqaEocfqiFkDi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yqgqaEocfqiFkDi: orq %r8,%r8 je L$_after_reduction_yqgqaEocfqiFkDi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yqgqaEocfqiFkDi: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_5_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_trmgpGgtzmsExiu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_trmgpGgtzmsExiu L$_16_blocks_overflow_trmgpGgtzmsExiu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_trmgpGgtzmsExiu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 
128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vuyopzdEphdnacq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vuyopzdEphdnacq L$_small_initial_partial_block_vuyopzdEphdnacq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 
vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vuyopzdEphdnacq: orq %r8,%r8 je L$_after_reduction_vuyopzdEphdnacq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vuyopzdEphdnacq: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_6_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_FwaeBcDAewBtpAB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_FwaeBcDAewBtpAB L$_16_blocks_overflow_FwaeBcDAewBtpAB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_FwaeBcDAewBtpAB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 
208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rdtAwwiDCCqmaAa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rdtAwwiDCCqmaAa L$_small_initial_partial_block_rdtAwwiDCCqmaAa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rdtAwwiDCCqmaAa: orq %r8,%r8 je L$_after_reduction_rdtAwwiDCCqmaAa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rdtAwwiDCCqmaAa: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_7_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_AnyscuqxAspkzsl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_AnyscuqxAspkzsl L$_16_blocks_overflow_AnyscuqxAspkzsl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 
vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_AnyscuqxAspkzsl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_digiiCypcjzldxx subq $16,%r8 movl $0,(%rdx) vpxorq 
%zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_digiiCypcjzldxx L$_small_initial_partial_block_digiiCypcjzldxx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_digiiCypcjzldxx: orq %r8,%r8 je L$_after_reduction_digiiCypcjzldxx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_digiiCypcjzldxx: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_8_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_cgqpkbbBmprdEnv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_cgqpkbbBmprdEnv L$_16_blocks_overflow_cgqpkbbBmprdEnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_cgqpkbbBmprdEnv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 
.byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sEakaptGjtmocyA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sEakaptGjtmocyA L$_small_initial_partial_block_sEakaptGjtmocyA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sEakaptGjtmocyA: orq %r8,%r8 je L$_after_reduction_sEakaptGjtmocyA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sEakaptGjtmocyA: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_9_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_ovcajrDEfpdjwcF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_ovcajrDEfpdjwcF L$_16_blocks_overflow_ovcajrDEfpdjwcF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_ovcajrDEfpdjwcF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wwoArvEqahCsDin subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wwoArvEqahCsDin L$_small_initial_partial_block_wwoArvEqahCsDin: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wwoArvEqahCsDin: orq %r8,%r8 je L$_after_reduction_wwoArvEqahCsDin vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wwoArvEqahCsDin: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_10_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_xyisBwjDghCtkcq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_xyisBwjDghCtkcq L$_16_blocks_overflow_xyisBwjDghCtkcq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_xyisBwjDghCtkcq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq 
$0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_maGzmchmgBAsGGp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_maGzmchmgBAsGGp L$_small_initial_partial_block_maGzmchmgBAsGGp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_maGzmchmgBAsGGp: orq %r8,%r8 je L$_after_reduction_maGzmchmgBAsGGp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_maGzmchmgBAsGGp: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_11_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_oCaueqhtnkiqikA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_oCaueqhtnkiqikA L$_16_blocks_overflow_oCaueqhtnkiqikA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_oCaueqhtnkiqikA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rwuhidithmAtnfF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 
98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rwuhidithmAtnfF L$_small_initial_partial_block_rwuhidithmAtnfF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rwuhidithmAtnfF: orq %r8,%r8 je L$_after_reduction_rwuhidithmAtnfF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rwuhidithmAtnfF: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_12_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_xwjsvxAnBhmckaz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_xwjsvxAnBhmckaz L$_16_blocks_overflow_xwjsvxAnBhmckaz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_xwjsvxAnBhmckaz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 
.byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_glqGCCyiublvFga subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 
vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_glqGCCyiublvFga L$_small_initial_partial_block_glqGCCyiublvFga: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_glqGCCyiublvFga: orq %r8,%r8 je L$_after_reduction_glqGCCyiublvFga vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_glqGCCyiublvFga: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_13_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_jfgktdduAaBgqFv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_jfgktdduAaBgqFv L$_16_blocks_overflow_jfgktdduAaBgqFv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_jfgktdduAaBgqFv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 
vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq 
$0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_deedxboGavqljAa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_deedxboGavqljAa L$_small_initial_partial_block_deedxboGavqljAa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_deedxboGavqljAa: orq %r8,%r8 je 
L$_after_reduction_deedxboGavqljAa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_deedxboGavqljAa: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_14_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_xdtrxodfgwcifbm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_xdtrxodfgwcifbm L$_16_blocks_overflow_xdtrxodfgwcifbm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_xdtrxodfgwcifbm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lhfnbffaAGncxjA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lhfnbffaAGncxjA L$_small_initial_partial_block_lhfnbffaAGncxjA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 
98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lhfnbffaAGncxjA: orq %r8,%r8 je L$_after_reduction_lhfnbffaAGncxjA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lhfnbffaAGncxjA: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_15_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_FrBtEqtdGyajfFu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_FrBtEqtdGyajfFu L$_16_blocks_overflow_FrBtEqtdGyajfFu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_FrBtEqtdGyajfFu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DlintgAmylyraad subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq 
$0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DlintgAmylyraad L$_small_initial_partial_block_DlintgAmylyraad: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DlintgAmylyraad: orq %r8,%r8 je L$_after_reduction_DlintgAmylyraad vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DlintgAmylyraad: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_16_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_ofhxurlakbuiiab vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ofhxurlakbuiiab L$_16_blocks_overflow_ofhxurlakbuiiab: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ofhxurlakbuiiab: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 
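/* Note: the .byte runs throughout this section appear to be manually-encoded
   EVEX instructions (0x62-prefixed): opcode 0x44 is vpclmulqdq, used for the
   GHASH carryless multiplications, and 0xdc/0xdd are vaesenc/vaesenclast,
   used for the AES rounds on 512-bit registers. */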
vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 
98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_niAfluBnEgrukbj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_niAfluBnEgrukbj: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_niAfluBnEgrukbj: jmp L$_last_blocks_done_FesvdmtDyerGEdv L$_last_num_blocks_is_0_FesvdmtDyerGEdv: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq 
%ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_FesvdmtDyerGEdv: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_ralurfzeatcGxDF L$_message_below_32_blocks_ralurfzeatcGxDF: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_BiAvfDwrflaDzBx vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_BiAvfDwrflaDzBx: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_pnzuldcucuyingq cmpl $8,%r10d je L$_last_num_blocks_is_8_pnzuldcucuyingq jb L$_last_num_blocks_is_7_1_pnzuldcucuyingq cmpl $12,%r10d je L$_last_num_blocks_is_12_pnzuldcucuyingq jb L$_last_num_blocks_is_11_9_pnzuldcucuyingq cmpl $15,%r10d je L$_last_num_blocks_is_15_pnzuldcucuyingq ja L$_last_num_blocks_is_16_pnzuldcucuyingq cmpl $14,%r10d je L$_last_num_blocks_is_14_pnzuldcucuyingq jmp L$_last_num_blocks_is_13_pnzuldcucuyingq L$_last_num_blocks_is_11_9_pnzuldcucuyingq: cmpl $10,%r10d je 
L$_last_num_blocks_is_10_pnzuldcucuyingq ja L$_last_num_blocks_is_11_pnzuldcucuyingq jmp L$_last_num_blocks_is_9_pnzuldcucuyingq L$_last_num_blocks_is_7_1_pnzuldcucuyingq: cmpl $4,%r10d je L$_last_num_blocks_is_4_pnzuldcucuyingq jb L$_last_num_blocks_is_3_1_pnzuldcucuyingq cmpl $6,%r10d ja L$_last_num_blocks_is_7_pnzuldcucuyingq je L$_last_num_blocks_is_6_pnzuldcucuyingq jmp L$_last_num_blocks_is_5_pnzuldcucuyingq L$_last_num_blocks_is_3_1_pnzuldcucuyingq: cmpl $2,%r10d ja L$_last_num_blocks_is_3_pnzuldcucuyingq je L$_last_num_blocks_is_2_pnzuldcucuyingq L$_last_num_blocks_is_1_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_zDsbocmrpEvnicC vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_zDsbocmrpEvnicC L$_16_blocks_overflow_zDsbocmrpEvnicC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_zDsbocmrpEvnicC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_GkBxoqrqufclksk subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GkBxoqrqufclksk L$_small_initial_partial_block_GkBxoqrqufclksk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_GkBxoqrqufclksk L$_small_initial_compute_done_GkBxoqrqufclksk: L$_after_reduction_GkBxoqrqufclksk: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_2_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_fkqtFBuohiwoapu vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_fkqtFBuohiwoapu L$_16_blocks_overflow_fkqtFBuohiwoapu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_fkqtFBuohiwoapu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vxviotokbwbgyEt subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vxviotokbwbgyEt L$_small_initial_partial_block_vxviotokbwbgyEt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vxviotokbwbgyEt: orq %r8,%r8 je L$_after_reduction_vxviotokbwbgyEt vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vxviotokbwbgyEt: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_3_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_myfxreEhmAEiFvd vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_myfxreEhmAEiFvd L$_16_blocks_overflow_myfxreEhmAEiFvd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_myfxreEhmAEiFvd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 
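/* Each vbroadcastf64x2 N(%rdi) broadcasts the 16-byte round key at offset N
   of the expanded key schedule (%rdi) across all four 128-bit lanes of a zmm
   register; the offsets running from 0 through 224 suggest a 15-entry, i.e.
   AES-256, key schedule. */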
.byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cvvlAqBdybFdjiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cvvlAqBdybFdjiy L$_small_initial_partial_block_cvvlAqBdybFdjiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cvvlAqBdybFdjiy: orq %r8,%r8 je 
L$_after_reduction_cvvlAqBdybFdjiy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cvvlAqBdybFdjiy: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_4_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_EshcbGrbbBjGmFs vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_EshcbGrbbBjGmFs L$_16_blocks_overflow_EshcbGrbbBjGmFs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_EshcbGrbbBjGmFs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GfeakfatCkpGtjm subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GfeakfatCkpGtjm L$_small_initial_partial_block_GfeakfatCkpGtjm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GfeakfatCkpGtjm: orq %r8,%r8 je L$_after_reduction_GfeakfatCkpGtjm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GfeakfatCkpGtjm: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_5_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_rBzncCcAACDmBwu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_rBzncCcAACDmBwu L$_16_blocks_overflow_rBzncCcAACDmBwu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_rBzncCcAACDmBwu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AstwCzCrFBsuGAb subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AstwCzCrFBsuGAb L$_small_initial_partial_block_AstwCzCrFBsuGAb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AstwCzCrFBsuGAb: orq %r8,%r8 je L$_after_reduction_AstwCzCrFBsuGAb vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AstwCzCrFBsuGAb: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_6_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_yghnlDweoeGyiyD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_yghnlDweoeGyiyD 
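/* Counter handling: the cmpl $2xx,%r15d tests above appear to check whether
   adding the remaining block count to the low byte of the counter would wrap.
   The fast path adds pre-byte-swapped increments to the already-shuffled
   counter blocks; the L$_16_blocks_overflow_* path below byte-swaps with
   vpshufb, adds the ddq_add_1234/ddq_add_4444 constants, then swaps back. */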
L$_16_blocks_overflow_yghnlDweoeGyiyD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_yghnlDweoeGyiyD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nAqArzgnghAposf subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 
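/* GHASH folding pattern used throughout: vpsrldq/vpslldq $8 split the middle
   carryless products and fold them into the high and low halves, the
   vextracti64x4/vextracti32x4 + vpxorq chain collapses the zmm lanes down to
   128 bits, and the POLY2 constant with vpclmulqdq plus vpternlogq $0x96
   appears to perform the final reduction modulo the GHASH polynomial into the
   %xmm14 accumulator. */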
vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nAqArzgnghAposf L$_small_initial_partial_block_nAqArzgnghAposf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nAqArzgnghAposf: orq %r8,%r8 je L$_after_reduction_nAqArzgnghAposf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nAqArzgnghAposf: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_7_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_stoalvbzsyrkrBC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_stoalvbzsyrkrBC L$_16_blocks_overflow_stoalvbzsyrkrBC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_stoalvbzsyrkrBC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tvAfmkadqFgykwd subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tvAfmkadqFgykwd L$_small_initial_partial_block_tvAfmkadqFgykwd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 
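# Horizontal fold: XOR the 128-bit lanes of the high and low product halves
# down to single xmm values before the final polynomial reduction.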
vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tvAfmkadqFgykwd: orq %r8,%r8 je L$_after_reduction_tvAfmkadqFgykwd vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tvAfmkadqFgykwd: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_8_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_miFDzcCBFGrssiv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_miFDzcCBFGrssiv L$_16_blocks_overflow_miFDzcCBFGrssiv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_miFDzcCBFGrssiv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dnvdvgGCEkvixhc subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dnvdvgGCEkvixhc L$_small_initial_partial_block_dnvdvgGCEkvixhc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dnvdvgGCEkvixhc: orq %r8,%r8 je L$_after_reduction_dnvdvgGCEkvixhc vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dnvdvgGCEkvixhc: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_9_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_lkCdskAdsidpkuw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_lkCdskAdsidpkuw L$_16_blocks_overflow_lkCdskAdsidpkuw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_lkCdskAdsidpkuw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 
0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BvuayrqCbqotfzl subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 
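# Combine the partial GHASH products from the two groups of hash-key powers,
# then fold in the final single-block product (key power at offset 240) and reduce.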
vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BvuayrqCbqotfzl L$_small_initial_partial_block_BvuayrqCbqotfzl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BvuayrqCbqotfzl: orq %r8,%r8 je L$_after_reduction_BvuayrqCbqotfzl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BvuayrqCbqotfzl: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_10_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_hktAeBlvDcCnios vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_hktAeBlvDcCnios L$_16_blocks_overflow_hktAeBlvDcCnios: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_hktAeBlvDcCnios: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 
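# The raw .byte sequences in these paths are EVEX encodings of vaesenc /
# vaesenclast and vpclmulqdq on zmm/ymm registers, presumably emitted as bytes
# for assemblers without VAES/VPCLMULQDQ mnemonic support.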
.byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qDkapAwwDbttzcj subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qDkapAwwDbttzcj L$_small_initial_partial_block_qDkapAwwDbttzcj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qDkapAwwDbttzcj: orq %r8,%r8 je L$_after_reduction_qDkapAwwDbttzcj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qDkapAwwDbttzcj: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_11_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_bblFcfwEgdzswCm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_bblFcfwEgdzswCm L$_16_blocks_overflow_bblFcfwEgdzswCm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_bblFcfwEgdzswCm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hxkcdbddneddmzb subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hxkcdbddneddmzb L$_small_initial_partial_block_hxkcdbddneddmzb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hxkcdbddneddmzb: orq %r8,%r8 je L$_after_reduction_hxkcdbddneddmzb vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hxkcdbddneddmzb: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_12_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_qmmgmehghErCGvF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_qmmgmehghErCGvF L$_16_blocks_overflow_qmmgmehghErCGvF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_qmmgmehghErCGvF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bGACCFiDoxkcuwq subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
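# GHASH reduction complete; %xmm14 holds the updated hash accumulator.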
jmp L$_small_initial_compute_done_bGACCFiDoxkcuwq L$_small_initial_partial_block_bGACCFiDoxkcuwq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bGACCFiDoxkcuwq: orq %r8,%r8 je L$_after_reduction_bGACCFiDoxkcuwq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bGACCFiDoxkcuwq: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_13_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_dulzkutdgjakGvB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_dulzkutdgjakGvB L$_16_blocks_overflow_dulzkutdgjakGvB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_dulzkutdgjakGvB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dmbcxBEdtigsClF subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dmbcxBEdtigsClF L$_small_initial_partial_block_dmbcxBEdtigsClF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dmbcxBEdtigsClF: orq %r8,%r8 je L$_after_reduction_dmbcxBEdtigsClF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dmbcxBEdtigsClF: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_14_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_nntbrGkellunBas vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_nntbrGkellunBas L$_16_blocks_overflow_nntbrGkellunBas: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_nntbrGkellunBas: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_aopDguzqabquECi subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 
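# vpternlogq with immediate 0x96 is a three-way XOR (a ^ b ^ c), used to fold
# CLMUL partial products into the accumulators in a single instruction.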
vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aopDguzqabquECi L$_small_initial_partial_block_aopDguzqabquECi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aopDguzqabquECi: orq %r8,%r8 je L$_after_reduction_aopDguzqabquECi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aopDguzqabquECi: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_15_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_gqGDtzmCceFkfal vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_gqGDtzmCceFkfal L$_16_blocks_overflow_gqGDtzmCceFkfal: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_gqGDtzmCceFkfal: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 
768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 
%zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kkvugeyiFsBldFy subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kkvugeyiFsBldFy L$_small_initial_partial_block_kkvugeyiFsBldFy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kkvugeyiFsBldFy: orq %r8,%r8 je L$_after_reduction_kkvugeyiFsBldFy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kkvugeyiFsBldFy: jmp 
L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_16_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_nnArmAxpgvlqCpA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_nnArmAxpgvlqCpA L$_16_blocks_overflow_nnArmAxpgvlqCpA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_nnArmAxpgvlqCpA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_uqdvluxFgGqdFqv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uqdvluxFgGqdFqv: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_uqdvluxFgGqdFqv: jmp L$_last_blocks_done_pnzuldcucuyingq L$_last_num_blocks_is_0_pnzuldcucuyingq: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 
98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_pnzuldcucuyingq: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_ralurfzeatcGxDF L$_message_below_equal_16_blocks_ralurfzeatcGxDF: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_hdjaAabmubhzgrE jl L$_small_initial_num_blocks_is_7_1_hdjaAabmubhzgrE cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_hdjaAabmubhzgrE jl L$_small_initial_num_blocks_is_11_9_hdjaAabmubhzgrE cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_hdjaAabmubhzgrE cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_hdjaAabmubhzgrE cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_hdjaAabmubhzgrE jmp L$_small_initial_num_blocks_is_13_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_11_9_hdjaAabmubhzgrE: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_hdjaAabmubhzgrE cmpq $10,%r12 je L$_small_initial_num_blocks_is_10_hdjaAabmubhzgrE jmp L$_small_initial_num_blocks_is_9_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_7_1_hdjaAabmubhzgrE: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_hdjaAabmubhzgrE jl L$_small_initial_num_blocks_is_3_1_hdjaAabmubhzgrE cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_hdjaAabmubhzgrE cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_hdjaAabmubhzgrE jmp L$_small_initial_num_blocks_is_5_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_3_1_hdjaAabmubhzgrE: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_hdjaAabmubhzgrE cmpq $2,%r12 je L$_small_initial_num_blocks_is_2_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_1_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 
vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_oglzypDCtpAhyGa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_oglzypDCtpAhyGa L$_small_initial_partial_block_oglzypDCtpAhyGa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_oglzypDCtpAhyGa L$_small_initial_compute_done_oglzypDCtpAhyGa: L$_after_reduction_oglzypDCtpAhyGa: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_2_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mwbBGGvalpfhfnw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
jmp L$_small_initial_compute_done_mwbBGGvalpfhfnw L$_small_initial_partial_block_mwbBGGvalpfhfnw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mwbBGGvalpfhfnw: orq %r8,%r8 je L$_after_reduction_mwbBGGvalpfhfnw vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_mwbBGGvalpfhfnw: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_3_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_njdmEDjqDqutzfl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_njdmEDjqDqutzfl 
L$_small_initial_partial_block_njdmEDjqDqutzfl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_njdmEDjqDqutzfl: orq %r8,%r8 je L$_after_reduction_njdmEDjqDqutzfl vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_njdmEDjqDqutzfl: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_4_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EsFwhCqwxAhrvFa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EsFwhCqwxAhrvFa L$_small_initial_partial_block_EsFwhCqwxAhrvFa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 
vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EsFwhCqwxAhrvFa: orq %r8,%r8 je L$_after_reduction_EsFwhCqwxAhrvFa vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_EsFwhCqwxAhrvFa: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_5_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_aiubnzDkbAjBaGt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aiubnzDkbAjBaGt L$_small_initial_partial_block_aiubnzDkbAjBaGt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aiubnzDkbAjBaGt: orq %r8,%r8 je L$_after_reduction_aiubnzDkbAjBaGt vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_aiubnzDkbAjBaGt: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_6_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 
vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GodgzzxioGrdAeg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GodgzzxioGrdAeg L$_small_initial_partial_block_GodgzzxioGrdAeg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GodgzzxioGrdAeg: orq %r8,%r8 je L$_after_reduction_GodgzzxioGrdAeg vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_GodgzzxioGrdAeg: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_7_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 
vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jmqmzBeujCAjAxl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jmqmzBeujCAjAxl L$_small_initial_partial_block_jmqmzBeujCAjAxl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jmqmzBeujCAjAxl: orq %r8,%r8 je 
L$_after_reduction_jmqmzBeujCAjAxl vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_jmqmzBeujCAjAxl: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_8_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lGwzbjEigiuyrxp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lGwzbjEigiuyrxp L$_small_initial_partial_block_lGwzbjEigiuyrxp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 
.byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lGwzbjEigiuyrxp: orq %r8,%r8 je L$_after_reduction_lGwzbjEigiuyrxp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_lGwzbjEigiuyrxp: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_9_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wtbpkxoFvlcvhkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wtbpkxoFvlcvhkk L$_small_initial_partial_block_wtbpkxoFvlcvhkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wtbpkxoFvlcvhkk: orq %r8,%r8 je L$_after_reduction_wtbpkxoFvlcvhkk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_wtbpkxoFvlcvhkk: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_10_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq 
%zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zDgaEerElzafAjF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zDgaEerElzafAjF L$_small_initial_partial_block_zDgaEerElzafAjF: movl %r8d,(%rdx) 
vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zDgaEerElzafAjF: orq %r8,%r8 je L$_after_reduction_zDgaEerElzafAjF vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_zDgaEerElzafAjF: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_11_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 
.byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BrkzfboGqlhyAvb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BrkzfboGqlhyAvb L$_small_initial_partial_block_BrkzfboGqlhyAvb: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BrkzfboGqlhyAvb: orq %r8,%r8 je L$_after_reduction_BrkzfboGqlhyAvb vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_BrkzfboGqlhyAvb: jmp 
L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_12_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wBuxadGqDBDeard subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 
$1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wBuxadGqDBDeard L$_small_initial_partial_block_wBuxadGqDBDeard: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wBuxadGqDBDeard: orq %r8,%r8 je L$_after_reduction_wBuxadGqDBDeard vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_wBuxadGqDBDeard: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_13_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 
98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_koBzdarsEboqwan subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_koBzdarsEboqwan L$_small_initial_partial_block_koBzdarsEboqwan: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 
.byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_koBzdarsEboqwan: orq %r8,%r8 je L$_after_reduction_koBzdarsEboqwan vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_koBzdarsEboqwan: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_14_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EhgwDyGvdzvgvtp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EhgwDyGvdzvgvtp L$_small_initial_partial_block_EhgwDyGvdzvgvtp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq 
$8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EhgwDyGvdzvgvtp: orq %r8,%r8 je L$_after_reduction_EhgwDyGvdzvgvtp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_EhgwDyGvdzvgvtp: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_15_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq 
%zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dguvDqtayFqucCq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dguvDqtayFqucCq L$_small_initial_partial_block_dguvDqtayFqucCq: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 
vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dguvDqtayFqucCq: orq %r8,%r8 je L$_after_reduction_dguvDqtayFqucCq vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_dguvDqtayFqucCq: jmp L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE L$_small_initial_num_blocks_is_16_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_mlladecCGcaEame: 
movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mlladecCGcaEame: vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_mlladecCGcaEame: L$_small_initial_blocks_encrypted_hdjaAabmubhzgrE: L$_ghash_done_ralurfzeatcGxDF: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_ralurfzeatcGxDF: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_ralurfzeatcGxDF: jmp L$exit_gcm_encrypt L$exit_gcm_encrypt: cmpq $256,%r8 jbe L$skip_hkeys_cleanup_cccrurCdlggtEnk vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) L$skip_hkeys_cleanup_cccrurCdlggtEnk: vzeroupper leaq (%rbp),%rsp popq %r15 popq %r14 popq %r13 popq %r12 popq %rbp popq %rbx .byte 0xf3,0xc3 L$encrypt_seh_end: .globl _aes_gcm_decrypt_avx512 .private_extern _aes_gcm_decrypt_avx512 .p2align 5 _aes_gcm_decrypt_avx512: L$decrypt_seh_begin: .byte 243,15,30,250 pushq %rbx L$decrypt_seh_push_rbx: pushq %rbp L$decrypt_seh_push_rbp: pushq %r12 L$decrypt_seh_push_r12: pushq %r13 L$decrypt_seh_push_r13: pushq %r14 L$decrypt_seh_push_r14: pushq %r15 L$decrypt_seh_push_r15: leaq 0(%rsp),%rbp L$decrypt_seh_setfp: L$decrypt_seh_prolog_end: subq $1588,%rsp andq $(-64),%rsp movl 240(%rdi),%eax cmpl $9,%eax je L$aes_gcm_decrypt_128_avx512 cmpl $11,%eax je L$aes_gcm_decrypt_192_avx512 cmpl $13,%eax je L$aes_gcm_decrypt_256_avx512 xorl %eax,%eax jmp L$exit_gcm_decrypt .p2align 5 L$aes_gcm_decrypt_128_avx512: orq %r8,%r8 je L$_enc_dec_abort_icBhFhCkojGgnBc xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_Cwuafefseqcgife movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq 
SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_Cwuafefseqcgife subq %r13,%r12 L$_no_extra_mask_Cwuafefseqcgife: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_Cwuafefseqcgife .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_Cwuafefseqcgife L$_partial_incomplete_Cwuafefseqcgife: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_Cwuafefseqcgife: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_Cwuafefseqcgife: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_icBhFhCkojGgnBc cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_icBhFhCkojGgnBc vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_DkBvliAEspzoabf vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_DkBvliAEspzoabf L$_next_16_overflow_DkBvliAEspzoabf: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_DkBvliAEspzoabf: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 
.byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_yDAnEECuuGxfwvr vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_yDAnEECuuGxfwvr: cmpq $512,%r8 jb L$_message_below_32_blocks_icBhFhCkojGgnBc cmpb $240,%r15b jae L$_next_16_overflow_lgmzdneusufrFmr vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_lgmzdneusufrFmr L$_next_16_overflow_lgmzdneusufrFmr: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_lgmzdneusufrFmr: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq 
%zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_ecaeaEFhspgwivG vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 
vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_ecaeaEFhspgwivG: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_icBhFhCkojGgnBc L$_encrypt_big_nblocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae L$_16_blocks_overflow_ApzaumldtosGeir vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ApzaumldtosGeir L$_16_blocks_overflow_ApzaumldtosGeir: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ApzaumldtosGeir: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_ubdpEpmjBbFwdEm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ubdpEpmjBbFwdEm L$_16_blocks_overflow_ubdpEpmjBbFwdEm: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ubdpEpmjBbFwdEm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 
vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_EdBasfawgBetkCB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_EdBasfawgBetkCB L$_16_blocks_overflow_EdBasfawgBetkCB: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_EdBasfawgBetkCB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq 
$0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_icBhFhCkojGgnBc L$_no_more_big_nblocks_icBhFhCkojGgnBc: cmpq $512,%r8 jae L$_encrypt_32_blocks_icBhFhCkojGgnBc cmpq $256,%r8 jae L$_encrypt_16_blocks_icBhFhCkojGgnBc L$_encrypt_0_blocks_ghash_32_icBhFhCkojGgnBc: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 
98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_lGwozrmByuyygbo cmpl $8,%r10d je L$_last_num_blocks_is_8_lGwozrmByuyygbo jb L$_last_num_blocks_is_7_1_lGwozrmByuyygbo cmpl $12,%r10d je L$_last_num_blocks_is_12_lGwozrmByuyygbo jb L$_last_num_blocks_is_11_9_lGwozrmByuyygbo cmpl $15,%r10d je L$_last_num_blocks_is_15_lGwozrmByuyygbo ja L$_last_num_blocks_is_16_lGwozrmByuyygbo cmpl $14,%r10d je L$_last_num_blocks_is_14_lGwozrmByuyygbo jmp L$_last_num_blocks_is_13_lGwozrmByuyygbo L$_last_num_blocks_is_11_9_lGwozrmByuyygbo: cmpl $10,%r10d je L$_last_num_blocks_is_10_lGwozrmByuyygbo ja L$_last_num_blocks_is_11_lGwozrmByuyygbo jmp L$_last_num_blocks_is_9_lGwozrmByuyygbo L$_last_num_blocks_is_7_1_lGwozrmByuyygbo: cmpl $4,%r10d je L$_last_num_blocks_is_4_lGwozrmByuyygbo jb L$_last_num_blocks_is_3_1_lGwozrmByuyygbo cmpl $6,%r10d ja L$_last_num_blocks_is_7_lGwozrmByuyygbo je L$_last_num_blocks_is_6_lGwozrmByuyygbo jmp L$_last_num_blocks_is_5_lGwozrmByuyygbo L$_last_num_blocks_is_3_1_lGwozrmByuyygbo: cmpl $2,%r10d ja L$_last_num_blocks_is_3_lGwozrmByuyygbo je L$_last_num_blocks_is_2_lGwozrmByuyygbo L$_last_num_blocks_is_1_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_lClAAkfGiaxqtqb vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_lClAAkfGiaxqtqb L$_16_blocks_overflow_lClAAkfGiaxqtqb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_lClAAkfGiaxqtqb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 
vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_AljhFopbDmohEEm subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AljhFopbDmohEEm L$_small_initial_partial_block_AljhFopbDmohEEm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_AljhFopbDmohEEm L$_small_initial_compute_done_AljhFopbDmohEEm: L$_after_reduction_AljhFopbDmohEEm: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_2_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_dxfcclgCzfhujoB vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_dxfcclgCzfhujoB L$_16_blocks_overflow_dxfcclgCzfhujoB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_dxfcclgCzfhujoB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} 
.byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mhEghxGxhmrFGgF subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mhEghxGxhmrFGgF L$_small_initial_partial_block_mhEghxGxhmrFGgF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mhEghxGxhmrFGgF: orq %r8,%r8 je L$_after_reduction_mhEghxGxhmrFGgF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mhEghxGxhmrFGgF: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_3_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_GzfdDtolkqgqFel vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_GzfdDtolkqgqFel L$_16_blocks_overflow_GzfdDtolkqgqFel: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_GzfdDtolkqgqFel: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 
32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qwrgpmqrxkxvCzs subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qwrgpmqrxkxvCzs L$_small_initial_partial_block_qwrgpmqrxkxvCzs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qwrgpmqrxkxvCzs: orq %r8,%r8 je L$_after_reduction_qwrgpmqrxkxvCzs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qwrgpmqrxkxvCzs: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_4_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_tFlldonsxgiBeEi vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_tFlldonsxgiBeEi L$_16_blocks_overflow_tFlldonsxgiBeEi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_tFlldonsxgiBeEi: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gDoehbqfcmrseCg subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gDoehbqfcmrseCg L$_small_initial_partial_block_gDoehbqfcmrseCg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gDoehbqfcmrseCg: orq %r8,%r8 je L$_after_reduction_gDoehbqfcmrseCg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gDoehbqfcmrseCg: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_5_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_lAbhcGDwivukqtE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_lAbhcGDwivukqtE L$_16_blocks_overflow_lAbhcGDwivukqtE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_lAbhcGDwivukqtE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 
98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ewowyEuhltFopkj subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ewowyEuhltFopkj L$_small_initial_partial_block_ewowyEuhltFopkj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ewowyEuhltFopkj: orq %r8,%r8 je L$_after_reduction_ewowyEuhltFopkj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ewowyEuhltFopkj: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_6_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_xsoFcrclantxpei vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_xsoFcrclantxpei L$_16_blocks_overflow_xsoFcrclantxpei: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_xsoFcrclantxpei: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 
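/* Tails of five or more blocks follow one template: AES-CTR rounds over up to four zmm counter
   registers are interleaved with raw-byte-encoded EVEX vpclmulqdq/vaesenc(last) forms (the .byte
   sequences) that GHASH the previously buffered blocks held on the stack. */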
vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lzfnkiFifvcmjit subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lzfnkiFifvcmjit L$_small_initial_partial_block_lzfnkiFifvcmjit: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 
98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lzfnkiFifvcmjit: orq %r8,%r8 je L$_after_reduction_lzfnkiFifvcmjit vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lzfnkiFifvcmjit: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_7_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_xeeduBscFEzvdva vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_xeeduBscFEzvdva L$_16_blocks_overflow_xeeduBscFEzvdva: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_xeeduBscFEzvdva: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DrhotzwvddbqFrj subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DrhotzwvddbqFrj L$_small_initial_partial_block_DrhotzwvddbqFrj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DrhotzwvddbqFrj: orq %r8,%r8 je L$_after_reduction_DrhotzwvddbqFrj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DrhotzwvddbqFrj: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_8_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_kBlrofzDjhoFnxv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_kBlrofzDjhoFnxv L$_16_blocks_overflow_kBlrofzDjhoFnxv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 
vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_kBlrofzDjhoFnxv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pAdxDizkcbwmjry subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pAdxDizkcbwmjry L$_small_initial_partial_block_pAdxDizkcbwmjry: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pAdxDizkcbwmjry: orq %r8,%r8 je L$_after_reduction_pAdxDizkcbwmjry vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pAdxDizkcbwmjry: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_9_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_whsqDBkDGaknbAC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_whsqDBkDGaknbAC L$_16_blocks_overflow_whsqDBkDGaknbAC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_whsqDBkDGaknbAC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 
64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ranhBavDwnwbdEt subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ranhBavDwnwbdEt L$_small_initial_partial_block_ranhBavDwnwbdEt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ranhBavDwnwbdEt: orq %r8,%r8 je L$_after_reduction_ranhBavDwnwbdEt vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ranhBavDwnwbdEt: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_10_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_GbBbalFokmrvvlx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_GbBbalFokmrvvlx L$_16_blocks_overflow_GbBbalFokmrvvlx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_GbBbalFokmrvvlx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 
.byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vyxjFnxqwhAbeyi subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vyxjFnxqwhAbeyi L$_small_initial_partial_block_vyxjFnxqwhAbeyi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vyxjFnxqwhAbeyi: orq %r8,%r8 je L$_after_reduction_vyxjFnxqwhAbeyi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vyxjFnxqwhAbeyi: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_11_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 
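/* The cmpl/jae pair below checks whether adding the remaining block count would carry out of the
   low counter byte; if so, the counter blocks are byte-swapped, incremented with the ddq_add_*
   constants, and swapped back instead of using the direct vpaddd path. */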
cmpl $245,%r15d jae L$_16_blocks_overflow_ldEsDEbywdmplpt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_ldEsDEbywdmplpt L$_16_blocks_overflow_ldEsDEbywdmplpt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_ldEsDEbywdmplpt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dwbzkfjluwpFvvF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 
144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dwbzkfjluwpFvvF L$_small_initial_partial_block_dwbzkfjluwpFvvF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dwbzkfjluwpFvvF: orq %r8,%r8 je L$_after_reduction_dwbzkfjluwpFvvF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dwbzkfjluwpFvvF: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_12_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_rAsEscwvsFrjwEn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_rAsEscwvsFrjwEn L$_16_blocks_overflow_rAsEscwvsFrjwEn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_rAsEscwvsFrjwEn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qqseyimgvencorf subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qqseyimgvencorf L$_small_initial_partial_block_qqseyimgvencorf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qqseyimgvencorf: orq %r8,%r8 je L$_after_reduction_qqseyimgvencorf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qqseyimgvencorf: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_13_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_yuCmdhwEwEhlsnq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_yuCmdhwEwEhlsnq L$_16_blocks_overflow_yuCmdhwEwEhlsnq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_yuCmdhwEwEhlsnq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qwyeDgkiECyoEct subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qwyeDgkiECyoEct L$_small_initial_partial_block_qwyeDgkiECyoEct: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qwyeDgkiECyoEct: orq %r8,%r8 je L$_after_reduction_qwyeDgkiECyoEct vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qwyeDgkiECyoEct: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_14_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_oEwrswoqGyjlsqe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_oEwrswoqGyjlsqe L$_16_blocks_overflow_oEwrswoqGyjlsqe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_oEwrswoqGyjlsqe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 
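/* Note: each L$_last_num_blocks_is_N path converts the residual byte count
   into a load/store mask (byte64_len_to_mask_table lookup + kmovq into %k1),
   so the trailing vmovdqu8 {%k1}{z} loads and vmovdqu8 {%k1} stores touch
   only the bytes that actually remain in the input. */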
vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FvgcfpdwFDaojDh subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FvgcfpdwFDaojDh L$_small_initial_partial_block_FvgcfpdwFDaojDh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FvgcfpdwFDaojDh: orq %r8,%r8 je L$_after_reduction_FvgcfpdwFDaojDh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FvgcfpdwFDaojDh: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_15_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_CtjhmwDvAgBsAry vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_CtjhmwDvAgBsAry L$_16_blocks_overflow_CtjhmwDvAgBsAry: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_CtjhmwDvAgBsAry: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rertkxjeyegEbAD subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rertkxjeyegEbAD L$_small_initial_partial_block_rertkxjeyegEbAD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rertkxjeyegEbAD: orq %r8,%r8 je L$_after_reduction_rertkxjeyegEbAD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rertkxjeyegEbAD: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_16_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_ejwsGBcDyFeryCA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ejwsGBcDyFeryCA L$_16_blocks_overflow_ejwsGBcDyFeryCA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ejwsGBcDyFeryCA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_vqjlBldpifEzCAi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 
98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vqjlBldpifEzCAi: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vqjlBldpifEzCAi: jmp L$_last_blocks_done_lGwozrmByuyygbo L$_last_num_blocks_is_0_lGwozrmByuyygbo: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_lGwozrmByuyygbo: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_icBhFhCkojGgnBc L$_encrypt_32_blocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae L$_16_blocks_overflow_bqdrbusADEaesxh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_bqdrbusADEaesxh L$_16_blocks_overflow_bqdrbusADEaesxh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_bqdrbusADEaesxh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 
vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_jsiAuvqcAwfrdty vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_jsiAuvqcAwfrdty L$_16_blocks_overflow_jsiAuvqcAwfrdty: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_jsiAuvqcAwfrdty: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 
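/* Note: the raw .byte sequences in this region are EVEX-encoded instructions
   written out literally (likely for assemblers without VAES/VPCLMULQDQ
   support): opcode byte 220/221 (0xdc/0xdd) is vaesenc/vaesenclast on the
   counter blocks, and opcode byte 68 (0x44) with a trailing immediate is
   vpclmulqdq against precomputed hash-key powers kept on the stack;
   vpternlogq $0x96 folds the partial products with a three-way XOR. */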
vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 
98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_aldutenGmyuhmFz cmpl $8,%r10d je L$_last_num_blocks_is_8_aldutenGmyuhmFz jb L$_last_num_blocks_is_7_1_aldutenGmyuhmFz cmpl $12,%r10d je L$_last_num_blocks_is_12_aldutenGmyuhmFz jb L$_last_num_blocks_is_11_9_aldutenGmyuhmFz cmpl $15,%r10d je L$_last_num_blocks_is_15_aldutenGmyuhmFz ja L$_last_num_blocks_is_16_aldutenGmyuhmFz cmpl $14,%r10d je L$_last_num_blocks_is_14_aldutenGmyuhmFz jmp L$_last_num_blocks_is_13_aldutenGmyuhmFz L$_last_num_blocks_is_11_9_aldutenGmyuhmFz: cmpl $10,%r10d je L$_last_num_blocks_is_10_aldutenGmyuhmFz ja L$_last_num_blocks_is_11_aldutenGmyuhmFz jmp L$_last_num_blocks_is_9_aldutenGmyuhmFz L$_last_num_blocks_is_7_1_aldutenGmyuhmFz: cmpl $4,%r10d je L$_last_num_blocks_is_4_aldutenGmyuhmFz jb L$_last_num_blocks_is_3_1_aldutenGmyuhmFz cmpl $6,%r10d ja L$_last_num_blocks_is_7_aldutenGmyuhmFz je L$_last_num_blocks_is_6_aldutenGmyuhmFz jmp L$_last_num_blocks_is_5_aldutenGmyuhmFz L$_last_num_blocks_is_3_1_aldutenGmyuhmFz: cmpl $2,%r10d ja L$_last_num_blocks_is_3_aldutenGmyuhmFz je L$_last_num_blocks_is_2_aldutenGmyuhmFz L$_last_num_blocks_is_1_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_kounvuokEjmfgDl vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_kounvuokEjmfgDl L$_16_blocks_overflow_kounvuokEjmfgDl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_kounvuokEjmfgDl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq 
$0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_myyjGGFduxDnmrl subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_myyjGGFduxDnmrl L$_small_initial_partial_block_myyjGGFduxDnmrl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_myyjGGFduxDnmrl L$_small_initial_compute_done_myyjGGFduxDnmrl: L$_after_reduction_myyjGGFduxDnmrl: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_2_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_GkcjorkhgDBFApE vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_GkcjorkhgDBFApE L$_16_blocks_overflow_GkcjorkhgDBFApE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_GkcjorkhgDBFApE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 
.byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_luGrBrcBwGbkypf subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_luGrBrcBwGbkypf L$_small_initial_partial_block_luGrBrcBwGbkypf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_luGrBrcBwGbkypf: orq %r8,%r8 je 
L$_after_reduction_luGrBrcBwGbkypf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_luGrBrcBwGbkypf: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_3_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_GlGoAfCtaxDnccC vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_GlGoAfCtaxDnccC L$_16_blocks_overflow_GlGoAfCtaxDnccC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_GlGoAfCtaxDnccC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hjFElydehDprmun subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hjFElydehDprmun L$_small_initial_partial_block_hjFElydehDprmun: movl 
%r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hjFElydehDprmun: orq %r8,%r8 je L$_after_reduction_hjFElydehDprmun vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hjFElydehDprmun: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_4_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_zwfpgGyijsBkpeE vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_zwfpgGyijsBkpeE L$_16_blocks_overflow_zwfpgGyijsBkpeE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_zwfpgGyijsBkpeE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ftaDveFCagABhCd subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 
98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ftaDveFCagABhCd L$_small_initial_partial_block_ftaDveFCagABhCd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ftaDveFCagABhCd: orq %r8,%r8 je L$_after_reduction_ftaDveFCagABhCd vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ftaDveFCagABhCd: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_5_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_CizAwbkEgozyasa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_CizAwbkEgozyasa L$_16_blocks_overflow_CizAwbkEgozyasa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_CizAwbkEgozyasa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 
80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_uvigeFCkFhxrjol subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_uvigeFCkFhxrjol L$_small_initial_partial_block_uvigeFCkFhxrjol: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uvigeFCkFhxrjol: orq %r8,%r8 je L$_after_reduction_uvigeFCkFhxrjol vpxorq %xmm7,%xmm14,%xmm14 
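/* Note: these tail paths collapse the 512-bit GHASH accumulators to a single
   128-bit value (vextracti64x4/vextracti32x4 plus XOR folds) and reduce it
   modulo the GCM polynomial with vpclmulqdq against the POLY2 constant,
   leaving the running hash in %xmm14 before jumping to
   L$_last_blocks_done_*. */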
L$_after_reduction_uvigeFCkFhxrjol: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_6_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_yuzbpkwFyzjuBAz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_yuzbpkwFyzjuBAz L$_16_blocks_overflow_yuzbpkwFyzjuBAz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_yuzbpkwFyzjuBAz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mGnxEwEsoAvgkoh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 
vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mGnxEwEsoAvgkoh L$_small_initial_partial_block_mGnxEwEsoAvgkoh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mGnxEwEsoAvgkoh: orq %r8,%r8 je L$_after_reduction_mGnxEwEsoAvgkoh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mGnxEwEsoAvgkoh: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_7_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_fDccaFllCyjzgaw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_fDccaFllCyjzgaw L$_16_blocks_overflow_fDccaFllCyjzgaw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_fDccaFllCyjzgaw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_awcpyfsBbqeAyhp subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_awcpyfsBbqeAyhp L$_small_initial_partial_block_awcpyfsBbqeAyhp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 
vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_awcpyfsBbqeAyhp: orq %r8,%r8 je L$_after_reduction_awcpyfsBbqeAyhp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_awcpyfsBbqeAyhp: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_8_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_yuxjCAwGGjlocDt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_yuxjCAwGGjlocDt L$_16_blocks_overflow_yuxjCAwGGjlocDt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_yuxjCAwGGjlocDt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tCmmipfvAEinBtG subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 
vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tCmmipfvAEinBtG L$_small_initial_partial_block_tCmmipfvAEinBtG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tCmmipfvAEinBtG: orq %r8,%r8 je L$_after_reduction_tCmmipfvAEinBtG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tCmmipfvAEinBtG: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_9_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_FrborCeuBByFkga vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_FrborCeuBByFkga L$_16_blocks_overflow_FrborCeuBByFkga: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_FrborCeuBByFkga: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 
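# Editor's annotation: in this stretch the AES rounds on the fresh counter
# blocks (the hand-encoded VAESENC ".byte 98,...,220,..." groups) appear to be
# interleaved with GHASH carry-less multiplications (the ".byte 98,...,68,..."
# groups) over data and hash-key powers kept in the stack frame, folding the
# previous chunk into the tag accumulator while the next keystream is computed.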
vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rhmklrqdhsjaixG subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rhmklrqdhsjaixG L$_small_initial_partial_block_rhmklrqdhsjaixG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 
.byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rhmklrqdhsjaixG: orq %r8,%r8 je L$_after_reduction_rhmklrqdhsjaixG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rhmklrqdhsjaixG: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_10_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_uqpvEzAtlprmDsg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_uqpvEzAtlprmDsg L$_16_blocks_overflow_uqpvEzAtlprmDsg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_uqpvEzAtlprmDsg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 
.byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yslffbaddFCEqwA subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yslffbaddFCEqwA L$_small_initial_partial_block_yslffbaddFCEqwA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 
vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yslffbaddFCEqwA: orq %r8,%r8 je L$_after_reduction_yslffbaddFCEqwA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yslffbaddFCEqwA: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_11_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_wyBrnxyfcdFguiF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_wyBrnxyfcdFguiF L$_16_blocks_overflow_wyBrnxyfcdFguiF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_wyBrnxyfcdFguiF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 
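# Editor's annotation: at this point the AES keystream has been XORed with the
# (mask-loaded) input blocks; the code that follows stores the masked result,
# byte-reflects the loaded blocks for GHASH, then folds the 512-bit accumulators
# down to 128 bits and reduces them with the POLY2 constant.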
vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_axnFjCbcEhxjDmF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_axnFjCbcEhxjDmF L$_small_initial_partial_block_axnFjCbcEhxjDmF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_axnFjCbcEhxjDmF: orq %r8,%r8 je L$_after_reduction_axnFjCbcEhxjDmF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_axnFjCbcEhxjDmF: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_12_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_nbfsGzmFjniAhpc vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_nbfsGzmFjniAhpc L$_16_blocks_overflow_nbfsGzmFjniAhpc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_nbfsGzmFjniAhpc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nmuwjreDfxCetjh subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nmuwjreDfxCetjh L$_small_initial_partial_block_nmuwjreDfxCetjh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nmuwjreDfxCetjh: orq %r8,%r8 je L$_after_reduction_nmuwjreDfxCetjh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nmuwjreDfxCetjh: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_13_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_BlpixnjkGtBtzBl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_BlpixnjkGtBtzBl L$_16_blocks_overflow_BlpixnjkGtBtzBl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_BlpixnjkGtBtzBl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 
832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FdqyplwEjyoxvwf subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 
98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FdqyplwEjyoxvwf L$_small_initial_partial_block_FdqyplwEjyoxvwf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FdqyplwEjyoxvwf: orq %r8,%r8 je L$_after_reduction_FdqyplwEjyoxvwf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FdqyplwEjyoxvwf: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_14_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_nlkisqljGgnlewr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_nlkisqljGgnlewr L$_16_blocks_overflow_nlkisqljGgnlewr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_nlkisqljGgnlewr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq 
%zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ByszoDfuCgvEska subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq 
$0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ByszoDfuCgvEska L$_small_initial_partial_block_ByszoDfuCgvEska: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ByszoDfuCgvEska: orq %r8,%r8 je L$_after_reduction_ByszoDfuCgvEska vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ByszoDfuCgvEska: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_15_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_FewkqxwDmrjetmG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_FewkqxwDmrjetmG L$_16_blocks_overflow_FewkqxwDmrjetmG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_FewkqxwDmrjetmG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 
vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jjbxCtvydaGqepC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq 
$0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jjbxCtvydaGqepC L$_small_initial_partial_block_jjbxCtvydaGqepC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jjbxCtvydaGqepC: orq %r8,%r8 je L$_after_reduction_jjbxCtvydaGqepC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jjbxCtvydaGqepC: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_16_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_hEoxzbghGBmpbpw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_hEoxzbghGBmpbpw L$_16_blocks_overflow_hEoxzbghGBmpbpw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_hEoxzbghGBmpbpw: vbroadcastf64x2 
0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_usEFihDgqghhogg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_usEFihDgqghhogg: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_usEFihDgqghhogg: jmp L$_last_blocks_done_aldutenGmyuhmFz L$_last_num_blocks_is_0_aldutenGmyuhmFz: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_aldutenGmyuhmFz: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_icBhFhCkojGgnBc L$_encrypt_16_blocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae L$_16_blocks_overflow_xlvtosuhBBytzsd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_xlvtosuhBBytzsd L$_16_blocks_overflow_xlvtosuhBBytzsd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb 
%zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_xlvtosuhBBytzsd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 
98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CqqjsGobDovpiom cmpl $8,%r10d je L$_last_num_blocks_is_8_CqqjsGobDovpiom jb L$_last_num_blocks_is_7_1_CqqjsGobDovpiom cmpl $12,%r10d je L$_last_num_blocks_is_12_CqqjsGobDovpiom jb L$_last_num_blocks_is_11_9_CqqjsGobDovpiom cmpl $15,%r10d je L$_last_num_blocks_is_15_CqqjsGobDovpiom ja L$_last_num_blocks_is_16_CqqjsGobDovpiom cmpl $14,%r10d je L$_last_num_blocks_is_14_CqqjsGobDovpiom jmp L$_last_num_blocks_is_13_CqqjsGobDovpiom L$_last_num_blocks_is_11_9_CqqjsGobDovpiom: cmpl $10,%r10d je L$_last_num_blocks_is_10_CqqjsGobDovpiom ja L$_last_num_blocks_is_11_CqqjsGobDovpiom jmp L$_last_num_blocks_is_9_CqqjsGobDovpiom L$_last_num_blocks_is_7_1_CqqjsGobDovpiom: cmpl $4,%r10d je L$_last_num_blocks_is_4_CqqjsGobDovpiom jb L$_last_num_blocks_is_3_1_CqqjsGobDovpiom cmpl $6,%r10d ja L$_last_num_blocks_is_7_CqqjsGobDovpiom je L$_last_num_blocks_is_6_CqqjsGobDovpiom jmp L$_last_num_blocks_is_5_CqqjsGobDovpiom L$_last_num_blocks_is_3_1_CqqjsGobDovpiom: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CqqjsGobDovpiom je L$_last_num_blocks_is_2_CqqjsGobDovpiom L$_last_num_blocks_is_1_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_iCcBbEaCnnBtiGz vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_iCcBbEaCnnBtiGz L$_16_blocks_overflow_iCcBbEaCnnBtiGz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_iCcBbEaCnnBtiGz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 
98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_himCBxsCzdjqdtp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_himCBxsCzdjqdtp L$_small_initial_partial_block_himCBxsCzdjqdtp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_himCBxsCzdjqdtp L$_small_initial_compute_done_himCBxsCzdjqdtp: L$_after_reduction_himCBxsCzdjqdtp: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_2_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_spdFufbAAGcAxFf vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_spdFufbAAGcAxFf L$_16_blocks_overflow_spdFufbAAGcAxFf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_spdFufbAAGcAxFf: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 
98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vdGxihcuFDvcDGx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vdGxihcuFDvcDGx L$_small_initial_partial_block_vdGxihcuFDvcDGx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
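/* End of the GHASH update for the 2-block tail.  At the compute_done
   label below, %r8 holds the number of bytes left in a trailing partial
   block; only when it is non-zero is the shuffled partial ciphertext in
   %xmm7 folded into the hash accumulator %xmm14. */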
L$_small_initial_compute_done_vdGxihcuFDvcDGx: orq %r8,%r8 je L$_after_reduction_vdGxihcuFDvcDGx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vdGxihcuFDvcDGx: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_3_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_sBAazunogzDjqho vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_sBAazunogzDjqho L$_16_blocks_overflow_sBAazunogzDjqho: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_sBAazunogzDjqho: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tldtpncdejgAGjh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tldtpncdejgAGjh L$_small_initial_partial_block_tldtpncdejgAGjh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tldtpncdejgAGjh: orq %r8,%r8 je L$_after_reduction_tldtpncdejgAGjh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tldtpncdejgAGjh: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_4_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_sekyjhofosAtkyB vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_sekyjhofosAtkyB L$_16_blocks_overflow_sekyjhofosAtkyB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_sekyjhofosAtkyB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq 
$0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wkomnalwByedats subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wkomnalwByedats L$_small_initial_partial_block_wkomnalwByedats: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wkomnalwByedats: orq %r8,%r8 je L$_after_reduction_wkomnalwByedats vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wkomnalwByedats: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_5_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_zdkGskjaniDljeq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_zdkGskjaniDljeq L$_16_blocks_overflow_zdkGskjaniDljeq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 
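/* Counter handling for the 5-block tail: the fast path above increments
   the counter vectors with plain vpaddd, which is only safe while the
   low counter byte cannot wrap (hence the cmpl against %r15d); the
   overflow path byte-swaps through zmm29, adds ddq_add_1234 and
   ddq_add_4444 as ordinary 32-bit integers, and swaps back. */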
L$_16_blocks_ok_zdkGskjaniDljeq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nCkgxpzwqEAtDfb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 
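/* Recombination of the carry-less products: the two middle 64x64 terms
   were XORed into zmm4 above; it is split with vpsrldq/vpslldq and
   folded into the high and low halves (zmm0 and zmm3 here), which are
   then collapsed across lanes and reduced modulo the GHASH polynomial
   via the POLY2 constant. */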
vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nCkgxpzwqEAtDfb L$_small_initial_partial_block_nCkgxpzwqEAtDfb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nCkgxpzwqEAtDfb: orq %r8,%r8 je L$_after_reduction_nCkgxpzwqEAtDfb vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nCkgxpzwqEAtDfb: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_6_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_mrylAcnDjuqklnd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_mrylAcnDjuqklnd L$_16_blocks_overflow_mrylAcnDjuqklnd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_mrylAcnDjuqklnd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 
98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dtDgucpjyaambao subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dtDgucpjyaambao L$_small_initial_partial_block_dtDgucpjyaambao: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 
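/* Lane collapse: zmm0 has already been folded down to xmm0 and zmm3
   down to ymm3; the final 128-bit extract/XOR below completes the fold
   before both halves enter the POLY2 reduction. */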
vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dtDgucpjyaambao: orq %r8,%r8 je L$_after_reduction_dtDgucpjyaambao vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dtDgucpjyaambao: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_7_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_ektccsvjwlnFwnw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_ektccsvjwlnFwnw L$_16_blocks_overflow_ektccsvjwlnFwnw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_ektccsvjwlnFwnw: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 
98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rbfqryodaBgimfn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rbfqryodaBgimfn L$_small_initial_partial_block_rbfqryodaBgimfn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rbfqryodaBgimfn: orq %r8,%r8 je L$_after_reduction_rbfqryodaBgimfn vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rbfqryodaBgimfn: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_8_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_GGmuDhkjBtqxcEd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_GGmuDhkjBtqxcEd L$_16_blocks_overflow_GGmuDhkjBtqxcEd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_GGmuDhkjBtqxcEd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 
512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_aapCFFxCFiAoabs subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 
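/* 8-block tail, partial-block case: with the middle products folded in,
   zmm0/zmm3 now hold the high/low halves of the 256-bit GHASH product;
   they are collapsed to 128 bits and reduced with POLY2 below. */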
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aapCFFxCFiAoabs L$_small_initial_partial_block_aapCFFxCFiAoabs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aapCFFxCFiAoabs: orq %r8,%r8 je L$_after_reduction_aapCFFxCFiAoabs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aapCFFxCFiAoabs: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_9_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_lDwlixsAzhAgDkG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_lDwlixsAzhAgDkG L$_16_blocks_overflow_lDwlixsAzhAgDkG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_lDwlixsAzhAgDkG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ixzDxvojEApEnCt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ixzDxvojEApEnCt L$_small_initial_partial_block_ixzDxvojEApEnCt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ixzDxvojEApEnCt: orq %r8,%r8 je L$_after_reduction_ixzDxvojEApEnCt vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ixzDxvojEApEnCt: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_10_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_BbbzknmqtuDuEfg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_BbbzknmqtuDuEfg L$_16_blocks_overflow_BbbzknmqtuDuEfg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_BbbzknmqtuDuEfg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ocgDwclfceuanoy subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ocgDwclfceuanoy L$_small_initial_partial_block_ocgDwclfceuanoy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 
.byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ocgDwclfceuanoy: orq %r8,%r8 je L$_after_reduction_ocgDwclfceuanoy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ocgDwclfceuanoy: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_11_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_jatgakEfrDmqCyG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_jatgakEfrDmqCyG L$_16_blocks_overflow_jatgakEfrDmqCyG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_jatgakEfrDmqCyG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tklDcEsdEdnDloA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tklDcEsdEdnDloA L$_small_initial_partial_block_tklDcEsdEdnDloA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tklDcEsdEdnDloA: orq %r8,%r8 je L$_after_reduction_tklDcEsdEdnDloA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tklDcEsdEdnDloA: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_12_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_tovGfhABebkuFEt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_tovGfhABebkuFEt L$_16_blocks_overflow_tovGfhABebkuFEt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_tovGfhABebkuFEt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EEeschrlAysrrgg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EEeschrlAysrrgg L$_small_initial_partial_block_EEeschrlAysrrgg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EEeschrlAysrrgg: orq %r8,%r8 je L$_after_reduction_EEeschrlAysrrgg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EEeschrlAysrrgg: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_13_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_ChCrwqCswoEpicz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_ChCrwqCswoEpicz L$_16_blocks_overflow_ChCrwqCswoEpicz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_ChCrwqCswoEpicz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iBgxbAnxnejeaAD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
L$_small_initial_compute_done_iBgxbAnxnejeaAD L$_small_initial_partial_block_iBgxbAnxnejeaAD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iBgxbAnxnejeaAD: orq %r8,%r8 je L$_after_reduction_iBgxbAnxnejeaAD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iBgxbAnxnejeaAD: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_14_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_GzibzgsizEbkyAE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_GzibzgsizEbkyAE L$_16_blocks_overflow_GzibzgsizEbkyAE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_GzibzgsizEbkyAE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq 
$0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ulArrmByoEAEezF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ulArrmByoEAEezF L$_small_initial_partial_block_ulArrmByoEAEezF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ulArrmByoEAEezF: orq %r8,%r8 je L$_after_reduction_ulArrmByoEAEezF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ulArrmByoEAEezF: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_15_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_DExqfkaBzzhxtrd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_DExqfkaBzzhxtrd L$_16_blocks_overflow_DExqfkaBzzhxtrd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_DExqfkaBzzhxtrd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zqfGgrfeCzzwkzB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zqfGgrfeCzzwkzB L$_small_initial_partial_block_zqfGgrfeCzzwkzB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zqfGgrfeCzzwkzB: orq %r8,%r8 je L$_after_reduction_zqfGgrfeCzzwkzB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zqfGgrfeCzzwkzB: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_16_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_fanaekDAulfkhcb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_fanaekDAulfkhcb L$_16_blocks_overflow_fanaekDAulfkhcb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_fanaekDAulfkhcb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 
vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_pCDjmBApGDgFGhw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pCDjmBApGDgFGhw: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pCDjmBApGDgFGhw: jmp L$_last_blocks_done_CqqjsGobDovpiom L$_last_num_blocks_is_0_CqqjsGobDovpiom: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 
98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CqqjsGobDovpiom: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_icBhFhCkojGgnBc L$_message_below_32_blocks_icBhFhCkojGgnBc: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_lurmstfAeByrDpz vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_lurmstfAeByrDpz: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_kpvFtqCzpagsbmy cmpl $8,%r10d je L$_last_num_blocks_is_8_kpvFtqCzpagsbmy jb L$_last_num_blocks_is_7_1_kpvFtqCzpagsbmy cmpl $12,%r10d je L$_last_num_blocks_is_12_kpvFtqCzpagsbmy jb L$_last_num_blocks_is_11_9_kpvFtqCzpagsbmy cmpl $15,%r10d je L$_last_num_blocks_is_15_kpvFtqCzpagsbmy ja L$_last_num_blocks_is_16_kpvFtqCzpagsbmy cmpl $14,%r10d je L$_last_num_blocks_is_14_kpvFtqCzpagsbmy jmp L$_last_num_blocks_is_13_kpvFtqCzpagsbmy L$_last_num_blocks_is_11_9_kpvFtqCzpagsbmy: cmpl $10,%r10d je L$_last_num_blocks_is_10_kpvFtqCzpagsbmy ja L$_last_num_blocks_is_11_kpvFtqCzpagsbmy jmp L$_last_num_blocks_is_9_kpvFtqCzpagsbmy L$_last_num_blocks_is_7_1_kpvFtqCzpagsbmy: cmpl $4,%r10d je L$_last_num_blocks_is_4_kpvFtqCzpagsbmy jb L$_last_num_blocks_is_3_1_kpvFtqCzpagsbmy cmpl $6,%r10d ja L$_last_num_blocks_is_7_kpvFtqCzpagsbmy je 
L$_last_num_blocks_is_6_kpvFtqCzpagsbmy jmp L$_last_num_blocks_is_5_kpvFtqCzpagsbmy L$_last_num_blocks_is_3_1_kpvFtqCzpagsbmy: cmpl $2,%r10d ja L$_last_num_blocks_is_3_kpvFtqCzpagsbmy je L$_last_num_blocks_is_2_kpvFtqCzpagsbmy L$_last_num_blocks_is_1_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_erjqEcdgnsabCAp vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_erjqEcdgnsabCAp L$_16_blocks_overflow_erjqEcdgnsabCAp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_erjqEcdgnsabCAp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_tihlhrngdnEcfCn subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tihlhrngdnEcfCn 
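/* Editorial note (hedged): the ".byte 98,..." sequences in these tail paths
   appear to be hand-encoded EVEX instructions (vpclmulqdq, vaesenc and
   vaesenclast on %zmm/%ymm/%xmm registers), presumably emitted as raw bytes
   so that assemblers without VPCLMULQDQ/VAES support can still build this
   generated file. */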
L$_small_initial_partial_block_tihlhrngdnEcfCn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_tihlhrngdnEcfCn L$_small_initial_compute_done_tihlhrngdnEcfCn: L$_after_reduction_tihlhrngdnEcfCn: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_2_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_AibviGpsltwhwck vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_AibviGpsltwhwck L$_16_blocks_overflow_AibviGpsltwhwck: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_AibviGpsltwhwck: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CGytiedGuwlshAl subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CGytiedGuwlshAl L$_small_initial_partial_block_CGytiedGuwlshAl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CGytiedGuwlshAl: orq %r8,%r8 je L$_after_reduction_CGytiedGuwlshAl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CGytiedGuwlshAl: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_3_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_cwyoDiaxggCofzt vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_cwyoDiaxggCofzt L$_16_blocks_overflow_cwyoDiaxggCofzt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_cwyoDiaxggCofzt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 
128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_khdhzwEsobgrlgi subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_khdhzwEsobgrlgi L$_small_initial_partial_block_khdhzwEsobgrlgi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_khdhzwEsobgrlgi: orq %r8,%r8 je L$_after_reduction_khdhzwEsobgrlgi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_khdhzwEsobgrlgi: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_4_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_fqeFwlbvdGyejoA vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_fqeFwlbvdGyejoA L$_16_blocks_overflow_fqeFwlbvdGyejoA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_fqeFwlbvdGyejoA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 
98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bzlErbuuhovEdpE subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bzlErbuuhovEdpE L$_small_initial_partial_block_bzlErbuuhovEdpE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bzlErbuuhovEdpE: orq %r8,%r8 je L$_after_reduction_bzlErbuuhovEdpE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bzlErbuuhovEdpE: jmp 
L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_5_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_cjnavuxfcgGzzCb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_cjnavuxfcgGzzCb L$_16_blocks_overflow_cjnavuxfcgGzzCb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_cjnavuxfcgGzzCb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_eaEmrzsvCBDlnpC subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_eaEmrzsvCBDlnpC L$_small_initial_partial_block_eaEmrzsvCBDlnpC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_eaEmrzsvCBDlnpC: orq %r8,%r8 je L$_after_reduction_eaEmrzsvCBDlnpC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_eaEmrzsvCBDlnpC: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_6_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_DndbknmyrzkriDg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_DndbknmyrzkriDg L$_16_blocks_overflow_DndbknmyrzkriDg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_DndbknmyrzkriDg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 
98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_geoBBGllnatlCqq subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_geoBBGllnatlCqq L$_small_initial_partial_block_geoBBGllnatlCqq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_geoBBGllnatlCqq: orq %r8,%r8 je L$_after_reduction_geoBBGllnatlCqq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_geoBBGllnatlCqq: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_7_kpvFtqCzpagsbmy: leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_jtGaGqFaokuwcFo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_jtGaGqFaokuwcFo L$_16_blocks_overflow_jtGaGqFaokuwcFo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_jtGaGqFaokuwcFo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xkeqvjpCBEjlkGx subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
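/* Editorial note (hedged): the sequence that follows folds the GHASH partial
   products down to a single 128-bit value (zmm -> ymm -> xmm extracts and
   XORs) and then reduces it modulo the GHASH polynomial via the POLY2
   constant, accumulating into %xmm14; the same pattern recurs in the other
   last-block paths. */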
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xkeqvjpCBEjlkGx L$_small_initial_partial_block_xkeqvjpCBEjlkGx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xkeqvjpCBEjlkGx: orq %r8,%r8 je L$_after_reduction_xkeqvjpCBEjlkGx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xkeqvjpCBEjlkGx: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_8_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_BCegvazduGiwBqv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_BCegvazduGiwBqv L$_16_blocks_overflow_BCegvazduGiwBqv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_BCegvazduGiwBqv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq 
$0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_fjDnDwdgfswBjwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_fjDnDwdgfswBjwp L$_small_initial_partial_block_fjDnDwdgfswBjwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fjDnDwdgfswBjwp: orq %r8,%r8 je L$_after_reduction_fjDnDwdgfswBjwp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_fjDnDwdgfswBjwp: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_9_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_nGczFFdvDDdbdAl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_nGczFFdvDDdbdAl L$_16_blocks_overflow_nGczFFdvDDdbdAl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_nGczFFdvDDdbdAl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 
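/* Editorial note (hedged): %k1 above is a byte mask looked up in
   byte64_len_to_mask_table from the remaining message length, so the masked
   vmovdqu8 loads and stores of the final partial block stay within the
   message buffer. */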
vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FEodvvDmqnsbxoz subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FEodvvDmqnsbxoz L$_small_initial_partial_block_FEodvvDmqnsbxoz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FEodvvDmqnsbxoz: orq %r8,%r8 je L$_after_reduction_FEodvvDmqnsbxoz vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FEodvvDmqnsbxoz: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_10_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_oulxbBotdhvdFbg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_oulxbBotdhvdFbg L$_16_blocks_overflow_oulxbBotdhvdFbg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_oulxbBotdhvdFbg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 
$1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mAhDuzfffzBcqnw subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mAhDuzfffzBcqnw L$_small_initial_partial_block_mAhDuzfffzBcqnw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mAhDuzfffzBcqnw: orq %r8,%r8 je L$_after_reduction_mAhDuzfffzBcqnw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mAhDuzfffzBcqnw: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_11_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_nCertFgkfoCxtun vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_nCertFgkfoCxtun L$_16_blocks_overflow_nCertFgkfoCxtun: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_nCertFgkfoCxtun: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dygAbwCGlokBzAu subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dygAbwCGlokBzAu L$_small_initial_partial_block_dygAbwCGlokBzAu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 
98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dygAbwCGlokBzAu: orq %r8,%r8 je L$_after_reduction_dygAbwCGlokBzAu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dygAbwCGlokBzAu: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_12_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_DtwkcFbdCfdcCrh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_DtwkcFbdCfdcCrh L$_16_blocks_overflow_DtwkcFbdCfdcCrh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_DtwkcFbdCfdcCrh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_uelgeBErnEDceCF subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_uelgeBErnEDceCF L$_small_initial_partial_block_uelgeBErnEDceCF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uelgeBErnEDceCF: orq %r8,%r8 je L$_after_reduction_uelgeBErnEDceCF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_uelgeBErnEDceCF: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_13_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_ndumifgEEuiqDiF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_ndumifgEEuiqDiF L$_16_blocks_overflow_ndumifgEEuiqDiF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_ndumifgEEuiqDiF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DrrvrAjlkiwmAzx subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DrrvrAjlkiwmAzx L$_small_initial_partial_block_DrrvrAjlkiwmAzx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DrrvrAjlkiwmAzx: orq %r8,%r8 je L$_after_reduction_DrrvrAjlkiwmAzx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DrrvrAjlkiwmAzx: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_14_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_osDGzgifEhqjECm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_osDGzgifEhqjECm L$_16_blocks_overflow_osDGzgifEhqjECm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_osDGzgifEhqjECm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_fwjCFubGdkywpFz subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_fwjCFubGdkywpFz L$_small_initial_partial_block_fwjCFubGdkywpFz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fwjCFubGdkywpFz: orq %r8,%r8 je L$_after_reduction_fwjCFubGdkywpFz vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_fwjCFubGdkywpFz: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_15_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_tiCBFudBnEgekda vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_tiCBFudBnEgekda L$_16_blocks_overflow_tiCBFudBnEgekda: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_tiCBFudBnEgekda: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_sbduutzwEklCDpB subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_sbduutzwEklCDpB L$_small_initial_partial_block_sbduutzwEklCDpB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq 
%zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_sbduutzwEklCDpB: orq %r8,%r8 je L$_after_reduction_sbduutzwEklCDpB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_sbduutzwEklCDpB: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_16_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_ennneCoBjzBsijF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ennneCoBjzBsijF L$_16_blocks_overflow_ennneCoBjzBsijF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ennneCoBjzBsijF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_qbevliloqkkkFsD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qbevliloqkkkFsD: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qbevliloqkkkFsD: jmp L$_last_blocks_done_kpvFtqCzpagsbmy L$_last_num_blocks_is_0_kpvFtqCzpagsbmy: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 
896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_kpvFtqCzpagsbmy: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_icBhFhCkojGgnBc L$_message_below_equal_16_blocks_icBhFhCkojGgnBc: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_fAioGdenAmmvupb jl L$_small_initial_num_blocks_is_7_1_fAioGdenAmmvupb cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_fAioGdenAmmvupb jl L$_small_initial_num_blocks_is_11_9_fAioGdenAmmvupb cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_fAioGdenAmmvupb cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_fAioGdenAmmvupb cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_fAioGdenAmmvupb jmp L$_small_initial_num_blocks_is_13_fAioGdenAmmvupb L$_small_initial_num_blocks_is_11_9_fAioGdenAmmvupb: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_fAioGdenAmmvupb cmpq $10,%r12 je L$_small_initial_num_blocks_is_10_fAioGdenAmmvupb jmp L$_small_initial_num_blocks_is_9_fAioGdenAmmvupb L$_small_initial_num_blocks_is_7_1_fAioGdenAmmvupb: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_fAioGdenAmmvupb jl L$_small_initial_num_blocks_is_3_1_fAioGdenAmmvupb cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_fAioGdenAmmvupb cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_fAioGdenAmmvupb jmp L$_small_initial_num_blocks_is_5_fAioGdenAmmvupb L$_small_initial_num_blocks_is_3_1_fAioGdenAmmvupb: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_fAioGdenAmmvupb cmpq $2,%r12 je L$_small_initial_num_blocks_is_2_fAioGdenAmmvupb L$_small_initial_num_blocks_is_1_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 
%xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_adigDqnunatgwqg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_adigDqnunatgwqg L$_small_initial_partial_block_adigDqnunatgwqg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_adigDqnunatgwqg L$_small_initial_compute_done_adigDqnunatgwqg: L$_after_reduction_adigDqnunatgwqg: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_2_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wijsfgBfoycrhbf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wijsfgBfoycrhbf L$_small_initial_partial_block_wijsfgBfoycrhbf: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq 
%zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wijsfgBfoycrhbf: orq %r8,%r8 je L$_after_reduction_wijsfgBfoycrhbf vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_wijsfgBfoycrhbf: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_3_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ibqkzvjmvrGthss subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ibqkzvjmvrGthss L$_small_initial_partial_block_ibqkzvjmvrGthss: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq 
%ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ibqkzvjmvrGthss: orq %r8,%r8 je L$_after_reduction_ibqkzvjmvrGthss vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ibqkzvjmvrGthss: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_4_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xlbdECcsDitBbrC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xlbdECcsDitBbrC L$_small_initial_partial_block_xlbdECcsDitBbrC: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xlbdECcsDitBbrC: orq %r8,%r8 je L$_after_reduction_xlbdECcsDitBbrC vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_xlbdECcsDitBbrC: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_5_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AvBuAcGaAAhviww subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AvBuAcGaAAhviww L$_small_initial_partial_block_AvBuAcGaAAhviww: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 
vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AvBuAcGaAAhviww: orq %r8,%r8 je L$_after_reduction_AvBuAcGaAAhviww vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_AvBuAcGaAAhviww: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_6_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dmnwagjDbfGuxqa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dmnwagjDbfGuxqa 
L$_small_initial_partial_block_dmnwagjDbfGuxqa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dmnwagjDbfGuxqa: orq %r8,%r8 je L$_after_reduction_dmnwagjDbfGuxqa vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_dmnwagjDbfGuxqa: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_7_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FFvlakmlCAfckcF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq 
%zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FFvlakmlCAfckcF L$_small_initial_partial_block_FFvlakmlCAfckcF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FFvlakmlCAfckcF: orq %r8,%r8 je L$_after_reduction_FFvlakmlCAfckcF vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_FFvlakmlCAfckcF: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_8_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 
%zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_brxnjeBcvFoBFjp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_brxnjeBcvFoBFjp L$_small_initial_partial_block_brxnjeBcvFoBFjp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_brxnjeBcvFoBFjp: orq %r8,%r8 je L$_after_reduction_brxnjeBcvFoBFjp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_brxnjeBcvFoBFjp: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_9_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 
.byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ruEgsxDerxegpsB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ruEgsxDerxegpsB L$_small_initial_partial_block_ruEgsxDerxegpsB: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ruEgsxDerxegpsB: orq %r8,%r8 je L$_after_reduction_ruEgsxDerxegpsB vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ruEgsxDerxegpsB: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_10_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DtsnEBEgqapGgkD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DtsnEBEgqapGgkD L$_small_initial_partial_block_DtsnEBEgqapGgkD: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DtsnEBEgqapGgkD: orq %r8,%r8 je L$_after_reduction_DtsnEBEgqapGgkD vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_DtsnEBEgqapGgkD: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_11_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FAlijzFrzEsACFt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FAlijzFrzEsACFt L$_small_initial_partial_block_FAlijzFrzEsACFt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FAlijzFrzEsACFt: orq %r8,%r8 je 
L$_after_reduction_FAlijzFrzEsACFt vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_FAlijzFrzEsACFt: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_12_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xdDFiiniApojwBg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xdDFiiniApojwBg L$_small_initial_partial_block_xdDFiiniApojwBg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xdDFiiniApojwBg: orq %r8,%r8 je L$_after_reduction_xdDFiiniApojwBg vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_xdDFiiniApojwBg: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_13_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nwkAjutBGaaatpl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nwkAjutBGaaatpl L$_small_initial_partial_block_nwkAjutBGaaatpl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
.byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nwkAjutBGaaatpl: orq %r8,%r8 je L$_after_reduction_nwkAjutBGaaatpl vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_nwkAjutBGaaatpl: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_14_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ffynzdqrsbdreFk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 
vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ffynzdqrsbdreFk L$_small_initial_partial_block_ffynzdqrsbdreFk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ffynzdqrsbdreFk: orq %r8,%r8 je L$_after_reduction_ffynzdqrsbdreFk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ffynzdqrsbdreFk: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_15_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 
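/* 15-block case of the small-initial-blocks path, AES-CTR pass: round key 0
   has already been XORed into the four counter-block vectors above; the
   remaining rounds follow, broadcasting each round key from the expanded key
   schedule at (%rdi). The .byte sequences are manually-encoded EVEX vaesenc
   instructions, with vaesenclast for the final round. */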
vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hbmAeclAGCyurof subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hbmAeclAGCyurof 
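/* 15-block case, partial final block: the leftover byte count is stored
   through %rdx and the partial-block state through 16(%rsi); the running
   GHASH value in %xmm14 is folded into the first ciphertext block, then the
   byte-reflected ciphertext is multiplied by precomputed powers of H (table
   at 80(%rsi)) and reduced modulo the GCM polynomial (POLY2) back into
   %xmm14. */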
L$_small_initial_partial_block_hbmAeclAGCyurof: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hbmAeclAGCyurof: orq %r8,%r8 je L$_after_reduction_hbmAeclAGCyurof vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_hbmAeclAGCyurof: jmp L$_small_initial_blocks_encrypted_fAioGdenAmmvupb L$_small_initial_num_blocks_is_16_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_fvqkmnelfBwdflt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fvqkmnelfBwdflt: vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_fvqkmnelfBwdflt: L$_small_initial_blocks_encrypted_fAioGdenAmmvupb: L$_ghash_done_icBhFhCkojGgnBc: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_icBhFhCkojGgnBc: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_icBhFhCkojGgnBc: jmp L$exit_gcm_decrypt .p2align 5 L$aes_gcm_decrypt_192_avx512: orq %r8,%r8 je L$_enc_dec_abort_efvnrtvwAsfehEC xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_Fvzomuuccfdfevt movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_Fvzomuuccfdfevt subq %r13,%r12 L$_no_extra_mask_Fvzomuuccfdfevt: vmovdqu64 16(%r12),%xmm0 vpand 
%xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_Fvzomuuccfdfevt .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_Fvzomuuccfdfevt L$_partial_incomplete_Fvzomuuccfdfevt: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_Fvzomuuccfdfevt: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_Fvzomuuccfdfevt: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_efvnrtvwAsfehEC cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_efvnrtvwAsfehEC vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_bmCGDqjpElhfFfq vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_bmCGDqjpElhfFfq L$_next_16_overflow_bmCGDqjpElhfFfq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_bmCGDqjpElhfFfq: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 
98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_mbihlziFEFsDoGE vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_mbihlziFEFsDoGE: cmpq $512,%r8 jb L$_message_below_32_blocks_efvnrtvwAsfehEC cmpb $240,%r15b jae L$_next_16_overflow_lakxgokamypkjgE vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_lakxgokamypkjgE L$_next_16_overflow_lakxgokamypkjgE: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_lakxgokamypkjgE: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 
176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_jpElfyvBextCmie vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 
98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_jpElfyvBextCmie: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_efvnrtvwAsfehEC L$_encrypt_big_nblocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae L$_16_blocks_overflow_AlopGldBavsssnG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_AlopGldBavsssnG L$_16_blocks_overflow_AlopGldBavsssnG: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_AlopGldBavsssnG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_gaBCwkclDxgqitC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_gaBCwkclDxgqitC L$_16_blocks_overflow_gaBCwkclDxgqitC: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_gaBCwkclDxgqitC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_Fxngerofutwuigg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_Fxngerofutwuigg L$_16_blocks_overflow_Fxngerofutwuigg: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_Fxngerofutwuigg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 
vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 
%zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_efvnrtvwAsfehEC L$_no_more_big_nblocks_efvnrtvwAsfehEC: cmpq $512,%r8 jae L$_encrypt_32_blocks_efvnrtvwAsfehEC cmpq $256,%r8 jae L$_encrypt_16_blocks_efvnrtvwAsfehEC L$_encrypt_0_blocks_ghash_32_efvnrtvwAsfehEC: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_xtiyCEhGGvgkorn cmpl $8,%r10d je L$_last_num_blocks_is_8_xtiyCEhGGvgkorn jb L$_last_num_blocks_is_7_1_xtiyCEhGGvgkorn cmpl $12,%r10d je L$_last_num_blocks_is_12_xtiyCEhGGvgkorn jb L$_last_num_blocks_is_11_9_xtiyCEhGGvgkorn cmpl $15,%r10d je L$_last_num_blocks_is_15_xtiyCEhGGvgkorn ja L$_last_num_blocks_is_16_xtiyCEhGGvgkorn cmpl $14,%r10d je L$_last_num_blocks_is_14_xtiyCEhGGvgkorn jmp L$_last_num_blocks_is_13_xtiyCEhGGvgkorn L$_last_num_blocks_is_11_9_xtiyCEhGGvgkorn: cmpl $10,%r10d je L$_last_num_blocks_is_10_xtiyCEhGGvgkorn ja L$_last_num_blocks_is_11_xtiyCEhGGvgkorn jmp L$_last_num_blocks_is_9_xtiyCEhGGvgkorn L$_last_num_blocks_is_7_1_xtiyCEhGGvgkorn: cmpl $4,%r10d je L$_last_num_blocks_is_4_xtiyCEhGGvgkorn jb L$_last_num_blocks_is_3_1_xtiyCEhGGvgkorn cmpl $6,%r10d ja L$_last_num_blocks_is_7_xtiyCEhGGvgkorn je L$_last_num_blocks_is_6_xtiyCEhGGvgkorn jmp L$_last_num_blocks_is_5_xtiyCEhGGvgkorn L$_last_num_blocks_is_3_1_xtiyCEhGGvgkorn: cmpl $2,%r10d ja L$_last_num_blocks_is_3_xtiyCEhGGvgkorn je L$_last_num_blocks_is_2_xtiyCEhGGvgkorn L$_last_num_blocks_is_1_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_nlADBBgdbvxiiEb vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_nlADBBgdbvxiiEb L$_16_blocks_overflow_nlADBBgdbvxiiEb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_nlADBBgdbvxiiEb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 
192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_tqujgvqggqpCibu subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tqujgvqggqpCibu L$_small_initial_partial_block_tqujgvqggqpCibu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_tqujgvqggqpCibu L$_small_initial_compute_done_tqujgvqggqpCibu: L$_after_reduction_tqujgvqggqpCibu: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_2_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_uvnjGlBDyvrfirm vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_uvnjGlBDyvrfirm L$_16_blocks_overflow_uvnjGlBDyvrfirm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_uvnjGlBDyvrfirm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wDeAjiDoocmqspC subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wDeAjiDoocmqspC L$_small_initial_partial_block_wDeAjiDoocmqspC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wDeAjiDoocmqspC: orq %r8,%r8 je L$_after_reduction_wDeAjiDoocmqspC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wDeAjiDoocmqspC: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_3_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_FgovsDdCfEGrkbF vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_FgovsDdCfEGrkbF L$_16_blocks_overflow_FgovsDdCfEGrkbF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_FgovsDdCfEGrkbF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vofcBkyofakpciE subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq 
%zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vofcBkyofakpciE L$_small_initial_partial_block_vofcBkyofakpciE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vofcBkyofakpciE: orq %r8,%r8 je L$_after_reduction_vofcBkyofakpciE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vofcBkyofakpciE: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_4_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_DlimwiDzackronx vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_DlimwiDzackronx L$_16_blocks_overflow_DlimwiDzackronx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_DlimwiDzackronx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_umExCeAmGaBqmig subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_umExCeAmGaBqmig L$_small_initial_partial_block_umExCeAmGaBqmig: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_umExCeAmGaBqmig: orq %r8,%r8 je L$_after_reduction_umExCeAmGaBqmig vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_umExCeAmGaBqmig: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_5_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_qGrgsssqhFxDdtg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_qGrgsssqhFxDdtg L$_16_blocks_overflow_qGrgsssqhFxDdtg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_qGrgsssqhFxDdtg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 
$0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DkwztEgqyefkjcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
jmp L$_small_initial_compute_done_DkwztEgqyefkjcA L$_small_initial_partial_block_DkwztEgqyefkjcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DkwztEgqyefkjcA: orq %r8,%r8 je L$_after_reduction_DkwztEgqyefkjcA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DkwztEgqyefkjcA: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_6_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_ufEGEnqpAFAEymx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_ufEGEnqpAFAEymx L$_16_blocks_overflow_ufEGEnqpAFAEymx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_ufEGEnqpAFAEymx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 
vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AjqanfyCsBedpsg subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AjqanfyCsBedpsg L$_small_initial_partial_block_AjqanfyCsBedpsg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AjqanfyCsBedpsg: orq %r8,%r8 je L$_after_reduction_AjqanfyCsBedpsg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AjqanfyCsBedpsg: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_7_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_xgpGrqoEEApwzGE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_xgpGrqoEEApwzGE L$_16_blocks_overflow_xgpGrqoEEApwzGE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_xgpGrqoEEApwzGE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lrumjmlatrsmlag subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lrumjmlatrsmlag L$_small_initial_partial_block_lrumjmlatrsmlag: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lrumjmlatrsmlag: orq %r8,%r8 je L$_after_reduction_lrumjmlatrsmlag vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lrumjmlatrsmlag: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_8_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_DBafwcnsvcxAbsv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_DBafwcnsvcxAbsv L$_16_blocks_overflow_DBafwcnsvcxAbsv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_DBafwcnsvcxAbsv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 
vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vptqcrjpEiCjEDi subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vptqcrjpEiCjEDi L$_small_initial_partial_block_vptqcrjpEiCjEDi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vptqcrjpEiCjEDi: orq %r8,%r8 je L$_after_reduction_vptqcrjpEiCjEDi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vptqcrjpEiCjEDi: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_9_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_muonozkGretEzbg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_muonozkGretEzbg L$_16_blocks_overflow_muonozkGretEzbg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_muonozkGretEzbg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 
98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tkpprjhbsieissq subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tkpprjhbsieissq L$_small_initial_partial_block_tkpprjhbsieissq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tkpprjhbsieissq: orq %r8,%r8 je L$_after_reduction_tkpprjhbsieissq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tkpprjhbsieissq: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_10_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_tcxAtedExcFvxwb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_tcxAtedExcFvxwb L$_16_blocks_overflow_tcxAtedExcFvxwb: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_tcxAtedExcFvxwb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jcddyvvAxCAjvqC subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 
vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jcddyvvAxCAjvqC L$_small_initial_partial_block_jcddyvvAxCAjvqC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jcddyvvAxCAjvqC: orq %r8,%r8 je L$_after_reduction_jcddyvvAxCAjvqC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jcddyvvAxCAjvqC: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_11_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_oCyoemhjBbobeot vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_oCyoemhjBbobeot L$_16_blocks_overflow_oCyoemhjBbobeot: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_oCyoemhjBbobeot: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 
1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DusiGqzupzswzGi subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq 
$0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DusiGqzupzswzGi L$_small_initial_partial_block_DusiGqzupzswzGi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DusiGqzupzswzGi: orq %r8,%r8 je L$_after_reduction_DusiGqzupzswzGi vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DusiGqzupzswzGi: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_12_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_rechbAAmkFuppsn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_rechbAAmkFuppsn L$_16_blocks_overflow_rechbAAmkFuppsn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_rechbAAmkFuppsn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lgDrfakaDoGugoh subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lgDrfakaDoGugoh L$_small_initial_partial_block_lgDrfakaDoGugoh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lgDrfakaDoGugoh: orq %r8,%r8 je L$_after_reduction_lgDrfakaDoGugoh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lgDrfakaDoGugoh: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_13_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_llFkwrFhuxfvsGD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_llFkwrFhuxfvsGD L$_16_blocks_overflow_llFkwrFhuxfvsGD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_llFkwrFhuxfvsGD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qwqiEdfkpnfpFcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qwqiEdfkpnfpFcA L$_small_initial_partial_block_qwqiEdfkpnfpFcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qwqiEdfkpnfpFcA: orq %r8,%r8 je L$_after_reduction_qwqiEdfkpnfpFcA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qwqiEdfkpnfpFcA: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_14_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_euGgDuqlvgCFoFG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_euGgDuqlvgCFoFG L$_16_blocks_overflow_euGgDuqlvgCFoFG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_euGgDuqlvgCFoFG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jAmrCFqArnxiBwr subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 
vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jAmrCFqArnxiBwr L$_small_initial_partial_block_jAmrCFqArnxiBwr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jAmrCFqArnxiBwr: orq %r8,%r8 je L$_after_reduction_jAmrCFqArnxiBwr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jAmrCFqArnxiBwr: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_15_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_pFsoEbjdpyaFdzt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_pFsoEbjdpyaFdzt L$_16_blocks_overflow_pFsoEbjdpyaFdzt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_pFsoEbjdpyaFdzt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 
vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ynvaqdiwqpExsAh subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ynvaqdiwqpExsAh L$_small_initial_partial_block_ynvaqdiwqpExsAh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ynvaqdiwqpExsAh: orq %r8,%r8 je L$_after_reduction_ynvaqdiwqpExsAh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ynvaqdiwqpExsAh: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_16_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_lxzkkenajCqycbF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_lxzkkenajCqycbF L$_16_blocks_overflow_lxzkkenajCqycbF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 
vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_lxzkkenajCqycbF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb 
%zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_rerkgBbyampldto: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rerkgBbyampldto: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rerkgBbyampldto: jmp L$_last_blocks_done_xtiyCEhGGvgkorn L$_last_num_blocks_is_0_xtiyCEhGGvgkorn: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_xtiyCEhGGvgkorn: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_efvnrtvwAsfehEC 
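# The L$_encrypt_32_blocks_* path below appears to process a 512-byte chunk in two
# 16-block passes: AES-CTR keystream is generated with EVEX-encoded VAES rounds (the
# .byte 98,... sequences are hand-encoded vaesenc/vaesenclast and vpclmulqdq on zmm
# registers), XORed with the input, and the byte-reflected data blocks are buffered on
# the stack while previously buffered blocks are folded into GHASH. A POLY2-based
# reduction then collapses the accumulators into %xmm14 before dispatching on the number
# of 16-byte blocks still outstanding (0..16, labels L$_last_num_blocks_is_N_*).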
L$_encrypt_32_blocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae L$_16_blocks_overflow_kzaebDdDwylbAcu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_kzaebDdDwylbAcu L$_16_blocks_overflow_kzaebDdDwylbAcu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_kzaebDdDwylbAcu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_hfufmxvqjkdtxiG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_hfufmxvqjkdtxiG L$_16_blocks_overflow_hfufmxvqjkdtxiG: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_hfufmxvqjkdtxiG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 
.byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_nGrjyBwfEzyFAvA cmpl $8,%r10d je L$_last_num_blocks_is_8_nGrjyBwfEzyFAvA jb L$_last_num_blocks_is_7_1_nGrjyBwfEzyFAvA cmpl $12,%r10d je L$_last_num_blocks_is_12_nGrjyBwfEzyFAvA jb L$_last_num_blocks_is_11_9_nGrjyBwfEzyFAvA cmpl $15,%r10d je L$_last_num_blocks_is_15_nGrjyBwfEzyFAvA ja L$_last_num_blocks_is_16_nGrjyBwfEzyFAvA cmpl $14,%r10d je L$_last_num_blocks_is_14_nGrjyBwfEzyFAvA jmp L$_last_num_blocks_is_13_nGrjyBwfEzyFAvA L$_last_num_blocks_is_11_9_nGrjyBwfEzyFAvA: cmpl $10,%r10d je L$_last_num_blocks_is_10_nGrjyBwfEzyFAvA ja L$_last_num_blocks_is_11_nGrjyBwfEzyFAvA jmp L$_last_num_blocks_is_9_nGrjyBwfEzyFAvA L$_last_num_blocks_is_7_1_nGrjyBwfEzyFAvA: cmpl $4,%r10d je L$_last_num_blocks_is_4_nGrjyBwfEzyFAvA jb L$_last_num_blocks_is_3_1_nGrjyBwfEzyFAvA cmpl $6,%r10d ja L$_last_num_blocks_is_7_nGrjyBwfEzyFAvA je L$_last_num_blocks_is_6_nGrjyBwfEzyFAvA jmp L$_last_num_blocks_is_5_nGrjyBwfEzyFAvA 
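# Each L$_last_num_blocks_is_N_* handler below appears to follow the same pattern:
# build a byte mask in %k1 from byte64_len_to_mask_table for the (possibly partial)
# final block, prepare N counter blocks (taking the L$_16_blocks_overflow_* path, which
# byte-reflects the counter around the add, when the low counter byte in %r15 is about
# to wrap), run the VAES rounds, XOR the keystream with the masked input, and accumulate
# the byte-reflected data blocks into GHASH.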
L$_last_num_blocks_is_3_1_nGrjyBwfEzyFAvA: cmpl $2,%r10d ja L$_last_num_blocks_is_3_nGrjyBwfEzyFAvA je L$_last_num_blocks_is_2_nGrjyBwfEzyFAvA L$_last_num_blocks_is_1_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_mklqBGCbyBjeEom vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_mklqBGCbyBjeEom L$_16_blocks_overflow_mklqBGCbyBjeEom: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_mklqBGCbyBjeEom: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_AljAsgffjDBAEDB subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AljAsgffjDBAEDB 
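/* L$_small_initial_partial_block_*: reached via the jl above when fewer than
   16 bytes remain in the final block.  The leftover byte count (%r8d) is
   written to (%rdx) and the last output block (%xmm11) to 16(%rsi); the
   accumulated GHASH products in %zmm24/%zmm25/%zmm26 are then reduced with
   the POLY2 constant before the partial block is mixed into the hash. */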
L$_small_initial_partial_block_AljAsgffjDBAEDB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_AljAsgffjDBAEDB L$_small_initial_compute_done_AljAsgffjDBAEDB: L$_after_reduction_AljAsgffjDBAEDB: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_2_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_ADzEaGzEEnztayt vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_ADzEaGzEEnztayt L$_16_blocks_overflow_ADzEaGzEEnztayt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_ADzEaGzEEnztayt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_awmEjFhlgwizrsw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 
.byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_awmEjFhlgwizrsw L$_small_initial_partial_block_awmEjFhlgwizrsw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_awmEjFhlgwizrsw: orq %r8,%r8 je L$_after_reduction_awmEjFhlgwizrsw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_awmEjFhlgwizrsw: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_3_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_lcaBxDbeGChbeFD vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_lcaBxDbeGChbeFD L$_16_blocks_overflow_lcaBxDbeGChbeFD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_lcaBxDbeGChbeFD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mBDBtxmxpwzmxwj subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mBDBtxmxpwzmxwj L$_small_initial_partial_block_mBDBtxmxpwzmxwj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mBDBtxmxpwzmxwj: orq %r8,%r8 je L$_after_reduction_mBDBtxmxpwzmxwj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mBDBtxmxpwzmxwj: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_4_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_pawpbdkivckxDwC vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_pawpbdkivckxDwC L$_16_blocks_overflow_pawpbdkivckxDwC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_pawpbdkivckxDwC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq 
%zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wubpbikcrdlgswu subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wubpbikcrdlgswu L$_small_initial_partial_block_wubpbikcrdlgswu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
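/* GHASH reduction in progress: the vpclmulqdq/vpslldq/vpsrldq sequence around
   this point folds the accumulated 256-bit carry-less product down to 128
   bits using the POLY2 constant just loaded into %xmm1, leaving the updated
   hash in %xmm14. */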
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wubpbikcrdlgswu: orq %r8,%r8 je L$_after_reduction_wubpbikcrdlgswu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wubpbikcrdlgswu: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_5_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_DaxgvFmGcDpdBDr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_DaxgvFmGcDpdBDr L$_16_blocks_overflow_DaxgvFmGcDpdBDr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_DaxgvFmGcDpdBDr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wfpxmlzpEjGxgfg subq $16,%r8 movl 
$0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wfpxmlzpEjGxgfg L$_small_initial_partial_block_wfpxmlzpEjGxgfg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wfpxmlzpEjGxgfg: orq %r8,%r8 je L$_after_reduction_wfpxmlzpEjGxgfg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wfpxmlzpEjGxgfg: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_6_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_GCBuEfGizfDEkbf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_GCBuEfGizfDEkbf L$_16_blocks_overflow_GCBuEfGizfDEkbf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_GCBuEfGizfDEkbf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_bsGacaiacduekkh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_bsGacaiacduekkh L$_small_initial_partial_block_bsGacaiacduekkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 
vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_bsGacaiacduekkh: orq %r8,%r8 je L$_after_reduction_bsGacaiacduekkh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_bsGacaiacduekkh: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_7_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_sxxwCglaApctqvC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_sxxwCglaApctqvC L$_16_blocks_overflow_sxxwCglaApctqvC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_sxxwCglaApctqvC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 
%zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yiCblticDBdDvqz subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yiCblticDBdDvqz L$_small_initial_partial_block_yiCblticDBdDvqz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yiCblticDBdDvqz: orq %r8,%r8 je L$_after_reduction_yiCblticDBdDvqz vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yiCblticDBdDvqz: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_8_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_CnnuddjEBnFGdsj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_CnnuddjEBnFGdsj L$_16_blocks_overflow_CnnuddjEBnFGdsj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_CnnuddjEBnFGdsj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 
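/* From here the handler interleaves the remaining AES rounds for the final
   counter blocks with GHASH multiplies over the previously produced,
   byte-reflected ciphertext parked at 832/896/960(%rsp), against what appear
   to be precomputed hash key powers indexed via %rbx; the partial products
   are collected in %zmm24/%zmm25/%zmm26 ahead of the final reduction. */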
.byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ozBrEzEFaraubuw subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ozBrEzEFaraubuw L$_small_initial_partial_block_ozBrEzEFaraubuw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 
144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ozBrEzEFaraubuw: orq %r8,%r8 je L$_after_reduction_ozBrEzEFaraubuw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ozBrEzEFaraubuw: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_9_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_jwawBbqsGrnbEEd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_jwawBbqsGrnbEEd L$_16_blocks_overflow_jwawBbqsGrnbEEd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_jwawBbqsGrnbEEd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 
.byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FtmdhyAthqlklcF subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FtmdhyAthqlklcF L$_small_initial_partial_block_FtmdhyAthqlklcF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FtmdhyAthqlklcF: orq %r8,%r8 je L$_after_reduction_FtmdhyAthqlklcF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FtmdhyAthqlklcF: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_10_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_bEhtipvqjwytqAA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_bEhtipvqjwytqAA L$_16_blocks_overflow_bEhtipvqjwytqAA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_bEhtipvqjwytqAA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dGfczcdzdkvubwf subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dGfczcdzdkvubwf L$_small_initial_partial_block_dGfczcdzdkvubwf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dGfczcdzdkvubwf: orq %r8,%r8 je L$_after_reduction_dGfczcdzdkvubwf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dGfczcdzdkvubwf: jmp 
L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_11_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_peywgEttBymhlkG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_peywgEttBymhlkG L$_16_blocks_overflow_peywgEttBymhlkG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_peywgEttBymhlkG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 
vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kGoguFjBsnAyegA subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kGoguFjBsnAyegA L$_small_initial_partial_block_kGoguFjBsnAyegA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kGoguFjBsnAyegA: orq %r8,%r8 je L$_after_reduction_kGoguFjBsnAyegA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kGoguFjBsnAyegA: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_12_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_pfftEtegsrsinbs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_pfftEtegsrsinbs L$_16_blocks_overflow_pfftEtegsrsinbs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_pfftEtegsrsinbs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_epFvAomFdDAhsfr subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_epFvAomFdDAhsfr L$_small_initial_partial_block_epFvAomFdDAhsfr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_epFvAomFdDAhsfr: orq %r8,%r8 je L$_after_reduction_epFvAomFdDAhsfr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_epFvAomFdDAhsfr: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_13_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_hoEpuvlFtAdDDCj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_hoEpuvlFtAdDDCj L$_16_blocks_overflow_hoEpuvlFtAdDDCj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_hoEpuvlFtAdDDCj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lwaCfdsabqxsDae subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 
112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lwaCfdsabqxsDae L$_small_initial_partial_block_lwaCfdsabqxsDae: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lwaCfdsabqxsDae: orq %r8,%r8 je L$_after_reduction_lwaCfdsabqxsDae vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lwaCfdsabqxsDae: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_14_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_kDibsGzbehdlyln vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_kDibsGzbehdlyln L$_16_blocks_overflow_kDibsGzbehdlyln: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 
L$_16_blocks_ok_kDibsGzbehdlyln: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_aygEgEvDgGbktBd subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aygEgEvDgGbktBd L$_small_initial_partial_block_aygEgEvDgGbktBd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aygEgEvDgGbktBd: orq %r8,%r8 je L$_after_reduction_aygEgEvDgGbktBd vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aygEgEvDgGbktBd: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_15_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_vejCgbGykbnkAnl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd 
%zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_vejCgbGykbnkAnl L$_16_blocks_overflow_vejCgbGykbnkAnl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_vejCgbGykbnkAnl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 
vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AeaGuuDepzdAfkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AeaGuuDepzdAfkw L$_small_initial_partial_block_AeaGuuDepzdAfkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
L$_small_initial_compute_done_AeaGuuDepzdAfkw: orq %r8,%r8 je L$_after_reduction_AeaGuuDepzdAfkw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AeaGuuDepzdAfkw: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_16_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_oEmrkvwdwsmBgef vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_oEmrkvwdwsmBgef L$_16_blocks_overflow_oEmrkvwdwsmBgef: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_oEmrkvwdwsmBgef: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_emEtFnwcsvbsGee: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_emEtFnwcsvbsGee: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_emEtFnwcsvbsGee: jmp L$_last_blocks_done_nGrjyBwfEzyFAvA L$_last_num_blocks_is_0_nGrjyBwfEzyFAvA: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq 
$0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_nGrjyBwfEzyFAvA: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_efvnrtvwAsfehEC L$_encrypt_16_blocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae L$_16_blocks_overflow_evgrutpeAjmaukd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_evgrutpeAjmaukd L$_16_blocks_overflow_evgrutpeAjmaukd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_evgrutpeAjmaukd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_FBaFgdErDhzlksr cmpl $8,%r10d je L$_last_num_blocks_is_8_FBaFgdErDhzlksr jb L$_last_num_blocks_is_7_1_FBaFgdErDhzlksr cmpl $12,%r10d je L$_last_num_blocks_is_12_FBaFgdErDhzlksr jb L$_last_num_blocks_is_11_9_FBaFgdErDhzlksr cmpl $15,%r10d je L$_last_num_blocks_is_15_FBaFgdErDhzlksr ja L$_last_num_blocks_is_16_FBaFgdErDhzlksr cmpl $14,%r10d je L$_last_num_blocks_is_14_FBaFgdErDhzlksr jmp L$_last_num_blocks_is_13_FBaFgdErDhzlksr L$_last_num_blocks_is_11_9_FBaFgdErDhzlksr: cmpl $10,%r10d je L$_last_num_blocks_is_10_FBaFgdErDhzlksr ja L$_last_num_blocks_is_11_FBaFgdErDhzlksr jmp L$_last_num_blocks_is_9_FBaFgdErDhzlksr L$_last_num_blocks_is_7_1_FBaFgdErDhzlksr: cmpl $4,%r10d je L$_last_num_blocks_is_4_FBaFgdErDhzlksr jb L$_last_num_blocks_is_3_1_FBaFgdErDhzlksr cmpl $6,%r10d ja L$_last_num_blocks_is_7_FBaFgdErDhzlksr je L$_last_num_blocks_is_6_FBaFgdErDhzlksr jmp L$_last_num_blocks_is_5_FBaFgdErDhzlksr L$_last_num_blocks_is_3_1_FBaFgdErDhzlksr: cmpl $2,%r10d ja L$_last_num_blocks_is_3_FBaFgdErDhzlksr je L$_last_num_blocks_is_2_FBaFgdErDhzlksr L$_last_num_blocks_is_1_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_EztzACczExrozqe vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_EztzACczExrozqe 
L$_16_blocks_overflow_EztzACczExrozqe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_EztzACczExrozqe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_CCCssCzirDpGCgu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CCCssCzirDpGCgu L$_small_initial_partial_block_CCCssCzirDpGCgu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_CCCssCzirDpGCgu L$_small_initial_compute_done_CCCssCzirDpGCgu: L$_after_reduction_CCCssCzirDpGCgu: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_2_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_ddpheeylmysesqA vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_ddpheeylmysesqA L$_16_blocks_overflow_ddpheeylmysesqA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_ddpheeylmysesqA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_kEwhkniEotxddri subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kEwhkniEotxddri L$_small_initial_partial_block_kEwhkniEotxddri: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kEwhkniEotxddri: orq %r8,%r8 je L$_after_reduction_kEwhkniEotxddri vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kEwhkniEotxddri: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_3_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_vAzgdsEEohhszlv vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_vAzgdsEEohhszlv L$_16_blocks_overflow_vAzgdsEEohhszlv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_vAzgdsEEohhszlv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 
vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vgpvCquElabkfFm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vgpvCquElabkfFm L$_small_initial_partial_block_vgpvCquElabkfFm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vgpvCquElabkfFm: orq %r8,%r8 je L$_after_reduction_vgpvCquElabkfFm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vgpvCquElabkfFm: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_4_FBaFgdErDhzlksr: leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_ciiDnbwsdfFhyEA vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_ciiDnbwsdfFhyEA L$_16_blocks_overflow_ciiDnbwsdfFhyEA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_ciiDnbwsdfFhyEA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BmnofkldoqxnfuE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BmnofkldoqxnfuE L$_small_initial_partial_block_BmnofkldoqxnfuE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BmnofkldoqxnfuE: orq %r8,%r8 je L$_after_reduction_BmnofkldoqxnfuE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BmnofkldoqxnfuE: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_5_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_AGvFmhBetCxAviv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_AGvFmhBetCxAviv L$_16_blocks_overflow_AGvFmhBetCxAviv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_AGvFmhBetCxAviv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 
.byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tnjvAdygufmEFFh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tnjvAdygufmEFFh L$_small_initial_partial_block_tnjvAdygufmEFFh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 
vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tnjvAdygufmEFFh: orq %r8,%r8 je L$_after_reduction_tnjvAdygufmEFFh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tnjvAdygufmEFFh: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_6_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_sjympigbCCDhsDn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_sjympigbCCDhsDn L$_16_blocks_overflow_sjympigbCCDhsDn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_sjympigbCCDhsDn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq 
$0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cnyvDpbBAuzhoGm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cnyvDpbBAuzhoGm L$_small_initial_partial_block_cnyvDpbBAuzhoGm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cnyvDpbBAuzhoGm: orq %r8,%r8 je L$_after_reduction_cnyvDpbBAuzhoGm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cnyvDpbBAuzhoGm: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_7_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_puBiejaewnoDvka vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_puBiejaewnoDvka L$_16_blocks_overflow_puBiejaewnoDvka: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_puBiejaewnoDvka: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 
16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kEkkBlBkynveErA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kEkkBlBkynveErA L$_small_initial_partial_block_kEkkBlBkynveErA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kEkkBlBkynveErA: orq %r8,%r8 je L$_after_reduction_kEkkBlBkynveErA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kEkkBlBkynveErA: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_8_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_eaeCeiduedGDdDq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_eaeCeiduedGDdDq L$_16_blocks_overflow_eaeCeiduedGDdDq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_eaeCeiduedGDdDq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qsuohqatcFrqreB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qsuohqatcFrqreB L$_small_initial_partial_block_qsuohqatcFrqreB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qsuohqatcFrqreB: orq %r8,%r8 je L$_after_reduction_qsuohqatcFrqreB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qsuohqatcFrqreB: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_9_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_zgrBucdeiivwwje vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_zgrBucdeiivwwje L$_16_blocks_overflow_zgrBucdeiivwwje: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_zgrBucdeiivwwje: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq 
$0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BobokvzEgBCGCux subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BobokvzEgBCGCux L$_small_initial_partial_block_BobokvzEgBCGCux: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq 
%zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BobokvzEgBCGCux: orq %r8,%r8 je L$_after_reduction_BobokvzEgBCGCux vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BobokvzEgBCGCux: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_10_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_rqjyEzzCiBijwho vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_rqjyEzzCiBijwho L$_16_blocks_overflow_rqjyEzzCiBijwho: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_rqjyEzzCiBijwho: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gptBtCibyiDhlou subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gptBtCibyiDhlou L$_small_initial_partial_block_gptBtCibyiDhlou: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 
.byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gptBtCibyiDhlou: orq %r8,%r8 je L$_after_reduction_gptBtCibyiDhlou vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gptBtCibyiDhlou: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_11_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_udirAnChEpiDCdb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_udirAnChEpiDCdb L$_16_blocks_overflow_udirAnChEpiDCdb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_udirAnChEpiDCdb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq 
$0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EuymoBDpuhDzkkw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EuymoBDpuhDzkkw L$_small_initial_partial_block_EuymoBDpuhDzkkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq 
%zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EuymoBDpuhDzkkw: orq %r8,%r8 je L$_after_reduction_EuymoBDpuhDzkkw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EuymoBDpuhDzkkw: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_12_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_nCrveguADGnpgFu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_nCrveguADGnpgFu L$_16_blocks_overflow_nCrveguADGnpgFu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_nCrveguADGnpgFu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EbClbforwjDGhdq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EbClbforwjDGhdq L$_small_initial_partial_block_EbClbforwjDGhdq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq 
%zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EbClbforwjDGhdq: orq %r8,%r8 je L$_after_reduction_EbClbforwjDGhdq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EbClbforwjDGhdq: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_13_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_FuAeDsuGfAcCbnh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_FuAeDsuGfAcCbnh L$_16_blocks_overflow_FuAeDsuGfAcCbnh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_FuAeDsuGfAcCbnh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GdeeilznaFbDlhh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 
vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GdeeilznaFbDlhh L$_small_initial_partial_block_GdeeilznaFbDlhh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GdeeilznaFbDlhh: orq %r8,%r8 je L$_after_reduction_GdeeilznaFbDlhh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GdeeilznaFbDlhh: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_14_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_FvEhyckDsphilDy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_FvEhyckDsphilDy L$_16_blocks_overflow_FvEhyckDsphilDy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_FvEhyckDsphilDy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_fkoDbsekulkxCkw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 
.byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_fkoDbsekulkxCkw L$_small_initial_partial_block_fkoDbsekulkxCkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fkoDbsekulkxCkw: orq %r8,%r8 je L$_after_reduction_fkoDbsekulkxCkw vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_fkoDbsekulkxCkw: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_15_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_lpConoqwylkjlwn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_lpConoqwylkjlwn L$_16_blocks_overflow_lpConoqwylkjlwn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 
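// The raw .byte 98,... sequences in this block appear to be EVEX-encoded
// VAES and VPCLMULQDQ instructions emitted as literal bytes, presumably for
// assemblers that predate those extensions: 0x62 (98) is the EVEX prefix,
// opcode 0xdc/0xdd (220/221) is vaesenc/vaesenclast, and 0x44 (68) is
// vpclmulqdq with its selector immediate as the final byte. From the
// L$_16_blocks_ok_* label below, the counter blocks prepared in
// zmm0/zmm3/zmm4/zmm5 are run through the AES rounds (round keys broadcast
// from (%rdi)) while GHASH multiplies over earlier ciphertext are
// interleaved between them.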
L$_16_blocks_ok_lpConoqwylkjlwn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 
98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DbmjnDvmvfAywny subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DbmjnDvmvfAywny L$_small_initial_partial_block_DbmjnDvmvfAywny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 
vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DbmjnDvmvfAywny: orq %r8,%r8 je L$_after_reduction_DbmjnDvmvfAywny vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_DbmjnDvmvfAywny: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_16_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_vhaFwxkrByAhtie vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_vhaFwxkrByAhtie L$_16_blocks_overflow_vhaFwxkrByAhtie: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_vhaFwxkrByAhtie: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_ciyykzjryphtjAc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ciyykzjryphtjAc: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ciyykzjryphtjAc: jmp L$_last_blocks_done_FBaFgdErDhzlksr L$_last_num_blocks_is_0_FBaFgdErDhzlksr: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 
vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_FBaFgdErDhzlksr: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_efvnrtvwAsfehEC L$_message_below_32_blocks_efvnrtvwAsfehEC: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_yBFabBiEpjEBBsr vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq 
$8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_yBFabBiEpjEBBsr: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_cuuhesezwjvjmyk cmpl $8,%r10d je L$_last_num_blocks_is_8_cuuhesezwjvjmyk jb L$_last_num_blocks_is_7_1_cuuhesezwjvjmyk cmpl $12,%r10d je L$_last_num_blocks_is_12_cuuhesezwjvjmyk jb L$_last_num_blocks_is_11_9_cuuhesezwjvjmyk cmpl $15,%r10d je L$_last_num_blocks_is_15_cuuhesezwjvjmyk ja L$_last_num_blocks_is_16_cuuhesezwjvjmyk cmpl $14,%r10d je L$_last_num_blocks_is_14_cuuhesezwjvjmyk jmp L$_last_num_blocks_is_13_cuuhesezwjvjmyk L$_last_num_blocks_is_11_9_cuuhesezwjvjmyk: cmpl $10,%r10d je L$_last_num_blocks_is_10_cuuhesezwjvjmyk ja L$_last_num_blocks_is_11_cuuhesezwjvjmyk jmp L$_last_num_blocks_is_9_cuuhesezwjvjmyk L$_last_num_blocks_is_7_1_cuuhesezwjvjmyk: cmpl $4,%r10d je L$_last_num_blocks_is_4_cuuhesezwjvjmyk jb L$_last_num_blocks_is_3_1_cuuhesezwjvjmyk cmpl $6,%r10d ja L$_last_num_blocks_is_7_cuuhesezwjvjmyk je L$_last_num_blocks_is_6_cuuhesezwjvjmyk jmp L$_last_num_blocks_is_5_cuuhesezwjvjmyk L$_last_num_blocks_is_3_1_cuuhesezwjvjmyk: cmpl $2,%r10d ja L$_last_num_blocks_is_3_cuuhesezwjvjmyk je L$_last_num_blocks_is_2_cuuhesezwjvjmyk L$_last_num_blocks_is_1_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_yqjovttCDEvpyyd vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_yqjovttCDEvpyyd L$_16_blocks_overflow_yqjovttCDEvpyyd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_yqjovttCDEvpyyd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 
176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_vEAkobbEjFEfDjE subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vEAkobbEjFEfDjE L$_small_initial_partial_block_vEAkobbEjFEfDjE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_vEAkobbEjFEfDjE L$_small_initial_compute_done_vEAkobbEjFEfDjE: L$_after_reduction_vEAkobbEjFEfDjE: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_2_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_dunlemEBzoyBoxa vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_dunlemEBzoyBoxa L$_16_blocks_overflow_dunlemEBzoyBoxa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_dunlemEBzoyBoxa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jwqibvpanppwwkg subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jwqibvpanppwwkg L$_small_initial_partial_block_jwqibvpanppwwkg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jwqibvpanppwwkg: orq %r8,%r8 je L$_after_reduction_jwqibvpanppwwkg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_jwqibvpanppwwkg: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_3_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_gknxnDbcehnficG vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_gknxnDbcehnficG L$_16_blocks_overflow_gknxnDbcehnficG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 
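// The cmpl/jae pair at the top of each of these branches appears to test
// whether adding the block count would wrap the low byte of the counter:
// on the fast path the pre-shuffled increments in zmm27/zmm28 are added
// directly, while the L$_16_blocks_overflow_* path byte-swaps the counter
// with the mask in zmm29, adds ddq_add_1234 (and ddq_add_4444 in the wider
// branches) so the carry propagates across bytes, then swaps back.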
L$_16_blocks_ok_gknxnDbcehnficG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yrqnxcGbhfxbzua subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yrqnxcGbhfxbzua L$_small_initial_partial_block_yrqnxcGbhfxbzua: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yrqnxcGbhfxbzua: orq %r8,%r8 je L$_after_reduction_yrqnxcGbhfxbzua vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yrqnxcGbhfxbzua: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_4_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_vkChDblsuoFkgEp vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_vkChDblsuoFkgEp L$_16_blocks_overflow_vkChDblsuoFkgEp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_vkChDblsuoFkgEp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kqcfotnkDdwFCle subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq 
%zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kqcfotnkDdwFCle L$_small_initial_partial_block_kqcfotnkDdwFCle: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kqcfotnkDdwFCle: orq %r8,%r8 je L$_after_reduction_kqcfotnkDdwFCle vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kqcfotnkDdwFCle: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_5_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_aGCpdetktlAtivE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_aGCpdetktlAtivE L$_16_blocks_overflow_aGCpdetktlAtivE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_aGCpdetktlAtivE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 
0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BhyxbheFwtzAGqD subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BhyxbheFwtzAGqD L$_small_initial_partial_block_BhyxbheFwtzAGqD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BhyxbheFwtzAGqD: orq %r8,%r8 je L$_after_reduction_BhyxbheFwtzAGqD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BhyxbheFwtzAGqD: jmp L$_last_blocks_done_cuuhesezwjvjmyk 
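// Each L$_last_num_blocks_is_N_* branch handles a final group of N
// (1 to 16) counter blocks: a load/store mask for the trailing partial
// 64-byte chunk is looked up in byte64_len_to_mask_table and kept in %k1,
// the blocks are encrypted and written out, and the new ciphertext is
// folded into the GHASH accumulator (apparently carried in xmm14) with
// vpclmulqdq against the saved hash-key powers, finishing with a reduction
// by the POLY2 constant.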
L$_last_num_blocks_is_6_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_DlEhcmhmAqggthl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_DlEhcmhmAqggthl L$_16_blocks_overflow_DlEhcmhmAqggthl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_DlEhcmhmAqggthl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ykkpmhjniEvyltu subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq 
$0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ykkpmhjniEvyltu L$_small_initial_partial_block_ykkpmhjniEvyltu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ykkpmhjniEvyltu: orq %r8,%r8 je L$_after_reduction_ykkpmhjniEvyltu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ykkpmhjniEvyltu: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_7_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_szxcAmcFcFxFikD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_szxcAmcFcFxFikD L$_16_blocks_overflow_szxcAmcFcFxFikD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_szxcAmcFcFxFikD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BsvCgmoprgDppla subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BsvCgmoprgDppla L$_small_initial_partial_block_BsvCgmoprgDppla: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BsvCgmoprgDppla: orq %r8,%r8 je L$_after_reduction_BsvCgmoprgDppla vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BsvCgmoprgDppla: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_8_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_nGgmonbofwfdiqp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_nGgmonbofwfdiqp L$_16_blocks_overflow_nGgmonbofwfdiqp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_nGgmonbofwfdiqp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 
vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qxxbtfdlDzEAenB subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qxxbtfdlDzEAenB L$_small_initial_partial_block_qxxbtfdlDzEAenB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qxxbtfdlDzEAenB: orq %r8,%r8 je L$_after_reduction_qxxbtfdlDzEAenB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qxxbtfdlDzEAenB: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_9_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_isErwnbzGxuwnib vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_isErwnbzGxuwnib L$_16_blocks_overflow_isErwnbzGxuwnib: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_isErwnbzGxuwnib: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq 
%xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ldosriajsdgdtty subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ldosriajsdgdtty L$_small_initial_partial_block_ldosriajsdgdtty: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ldosriajsdgdtty: orq %r8,%r8 je L$_after_reduction_ldosriajsdgdtty vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ldosriajsdgdtty: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_10_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_ylkmjtxhbazdAht vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_ylkmjtxhbazdAht L$_16_blocks_overflow_ylkmjtxhbazdAht: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_ylkmjtxhbazdAht: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 
.byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cogjdCgsFwwACAv subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cogjdCgsFwwACAv L$_small_initial_partial_block_cogjdCgsFwwACAv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 
98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cogjdCgsFwwACAv: orq %r8,%r8 je L$_after_reduction_cogjdCgsFwwACAv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cogjdCgsFwwACAv: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_11_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_amFqbyqnsgkbEyu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_amFqbyqnsgkbEyu L$_16_blocks_overflow_amFqbyqnsgkbEyu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_amFqbyqnsgkbEyu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_duCGbqEavktkktr subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_duCGbqEavktkktr L$_small_initial_partial_block_duCGbqEavktkktr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 
98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_duCGbqEavktkktr: orq %r8,%r8 je L$_after_reduction_duCGbqEavktkktr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_duCGbqEavktkktr: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_12_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_Gxdljjoscahpipo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_Gxdljjoscahpipo L$_16_blocks_overflow_Gxdljjoscahpipo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_Gxdljjoscahpipo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EbrtvvbpfhnmgEG subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EbrtvvbpfhnmgEG L$_small_initial_partial_block_EbrtvvbpfhnmgEG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EbrtvvbpfhnmgEG: orq %r8,%r8 je L$_after_reduction_EbrtvvbpfhnmgEG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EbrtvvbpfhnmgEG: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_13_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_BzbwlusABaejjjy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_BzbwlusABaejjjy L$_16_blocks_overflow_BzbwlusABaejjjy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_BzbwlusABaejjjy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 
144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_axssylktqnfAEEo subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_axssylktqnfAEEo L$_small_initial_partial_block_axssylktqnfAEEo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq 
$8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_axssylktqnfAEEo: orq %r8,%r8 je L$_after_reduction_axssylktqnfAEEo vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_axssylktqnfAEEo: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_14_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_wfxluBeiqgADmFb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_wfxluBeiqgADmFb L$_16_blocks_overflow_wfxluBeiqgADmFb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_wfxluBeiqgADmFb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_goygycijAEpsvvt subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_goygycijAEpsvvt L$_small_initial_partial_block_goygycijAEpsvvt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 
98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_goygycijAEpsvvt: orq %r8,%r8 je L$_after_reduction_goygycijAEpsvvt vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_goygycijAEpsvvt: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_15_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_vyklFkDwzsnvgsC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_vyklFkDwzsnvgsC L$_16_blocks_overflow_vyklFkDwzsnvgsC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_vyklFkDwzsnvgsC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wtfwhoaquntnsFC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wtfwhoaquntnsFC L$_small_initial_partial_block_wtfwhoaquntnsFC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 
98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wtfwhoaquntnsFC: orq %r8,%r8 je L$_after_reduction_wtfwhoaquntnsFC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wtfwhoaquntnsFC: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_16_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_cwmmduuojwChbzc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_cwmmduuojwChbzc L$_16_blocks_overflow_cwmmduuojwChbzc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_cwmmduuojwChbzc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_EFFoGallwwbomEy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 
vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EFFoGallwwbomEy: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EFFoGallwwbomEy: jmp L$_last_blocks_done_cuuhesezwjvjmyk L$_last_num_blocks_is_0_cuuhesezwjvjmyk: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_cuuhesezwjvjmyk: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_efvnrtvwAsfehEC L$_message_below_equal_16_blocks_efvnrtvwAsfehEC: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_sFoDGktxtpnDmhn jl L$_small_initial_num_blocks_is_7_1_sFoDGktxtpnDmhn cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_sFoDGktxtpnDmhn jl L$_small_initial_num_blocks_is_11_9_sFoDGktxtpnDmhn cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_sFoDGktxtpnDmhn cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_sFoDGktxtpnDmhn cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_sFoDGktxtpnDmhn jmp L$_small_initial_num_blocks_is_13_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_11_9_sFoDGktxtpnDmhn: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_sFoDGktxtpnDmhn cmpq $10,%r12 je L$_small_initial_num_blocks_is_10_sFoDGktxtpnDmhn jmp L$_small_initial_num_blocks_is_9_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_7_1_sFoDGktxtpnDmhn: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_sFoDGktxtpnDmhn jl L$_small_initial_num_blocks_is_3_1_sFoDGktxtpnDmhn cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_sFoDGktxtpnDmhn cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_sFoDGktxtpnDmhn jmp L$_small_initial_num_blocks_is_5_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_3_1_sFoDGktxtpnDmhn: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_sFoDGktxtpnDmhn cmpq $2,%r12 je L$_small_initial_num_blocks_is_2_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_1_sFoDGktxtpnDmhn: vmovdqa64 
SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_FGCgmvsGdutropz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FGCgmvsGdutropz L$_small_initial_partial_block_FGCgmvsGdutropz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_FGCgmvsGdutropz L$_small_initial_compute_done_FGCgmvsGdutropz: L$_after_reduction_FGCgmvsGdutropz: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_2_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 
$1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jEBbtDDBfBjEltG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jEBbtDDBfBjEltG L$_small_initial_partial_block_jEBbtDDBfBjEltG: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jEBbtDDBfBjEltG: orq %r8,%r8 je L$_after_reduction_jEBbtDDBfBjEltG vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_jEBbtDDBfBjEltG: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_3_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_EjibsatBlzkgqAl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EjibsatBlzkgqAl L$_small_initial_partial_block_EjibsatBlzkgqAl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EjibsatBlzkgqAl: orq %r8,%r8 je L$_after_reduction_EjibsatBlzkgqAl vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_EjibsatBlzkgqAl: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_4_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xqconsagugmDarn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 
98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xqconsagugmDarn L$_small_initial_partial_block_xqconsagugmDarn: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xqconsagugmDarn: orq %r8,%r8 je L$_after_reduction_xqconsagugmDarn vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_xqconsagugmDarn: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_5_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 
%zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pbFCejpvpmxjAhk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pbFCejpvpmxjAhk L$_small_initial_partial_block_pbFCejpvpmxjAhk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pbFCejpvpmxjAhk: orq %r8,%r8 je L$_after_reduction_pbFCejpvpmxjAhk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_pbFCejpvpmxjAhk: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_6_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 
.byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_uktkzFjovqcxfqp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_uktkzFjovqcxfqp L$_small_initial_partial_block_uktkzFjovqcxfqp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_uktkzFjovqcxfqp: orq %r8,%r8 je L$_after_reduction_uktkzFjovqcxfqp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_uktkzFjovqcxfqp: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_7_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 
64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_giliDfyAgzgDsqz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_giliDfyAgzgDsqz L$_small_initial_partial_block_giliDfyAgzgDsqz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 
.byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_giliDfyAgzgDsqz: orq %r8,%r8 je L$_after_reduction_giliDfyAgzgDsqz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_giliDfyAgzgDsqz: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_8_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_DjnECqEweilEAGu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_DjnECqEweilEAGu L$_small_initial_partial_block_DjnECqEweilEAGu: movl %r8d,(%rdx) vmovdqu64 
%xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_DjnECqEweilEAGu: orq %r8,%r8 je L$_after_reduction_DjnECqEweilEAGu vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_DjnECqEweilEAGu: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_9_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb 
%zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kgxaGkfnalAmrwz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kgxaGkfnalAmrwz L$_small_initial_partial_block_kgxaGkfnalAmrwz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kgxaGkfnalAmrwz: orq %r8,%r8 je L$_after_reduction_kgxaGkfnalAmrwz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_kgxaGkfnalAmrwz: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_10_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BCvcswkitbgmjFe subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BCvcswkitbgmjFe L$_small_initial_partial_block_BCvcswkitbgmjFe: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq 
%zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BCvcswkitbgmjFe: orq %r8,%r8 je L$_after_reduction_BCvcswkitbgmjFe vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_BCvcswkitbgmjFe: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_11_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 
80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xxiyEjrxujqtjjz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xxiyEjrxujqtjjz L$_small_initial_partial_block_xxiyEjrxujqtjjz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xxiyEjrxujqtjjz: orq %r8,%r8 je L$_after_reduction_xxiyEjrxujqtjjz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_xxiyEjrxujqtjjz: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_12_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 
128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_jAwfsnuhpsyacia subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_jAwfsnuhpsyacia L$_small_initial_partial_block_jAwfsnuhpsyacia: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 
98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_jAwfsnuhpsyacia: orq %r8,%r8 je L$_after_reduction_jAwfsnuhpsyacia vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_jAwfsnuhpsyacia: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_13_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 
.byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iimAEdydkqcfzCi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iimAEdydkqcfzCi L$_small_initial_partial_block_iimAEdydkqcfzCi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iimAEdydkqcfzCi: orq %r8,%r8 je L$_after_reduction_iimAEdydkqcfzCi vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_iimAEdydkqcfzCi: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_14_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cflpryEedqzCjvl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 
98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cflpryEedqzCjvl L$_small_initial_partial_block_cflpryEedqzCjvl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cflpryEedqzCjvl: orq %r8,%r8 je L$_after_reduction_cflpryEedqzCjvl vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_cflpryEedqzCjvl: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_15_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 
192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pFbDADbCplEDFfw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq 
%xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pFbDADbCplEDFfw L$_small_initial_partial_block_pFbDADbCplEDFfw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pFbDADbCplEDFfw: orq %r8,%r8 je L$_after_reduction_pFbDADbCplEDFfw vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_pFbDADbCplEDFfw: jmp L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn L$_small_initial_num_blocks_is_16_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 
.byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_pGqEmoznEqGhujq: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pGqEmoznEqGhujq: vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_pGqEmoznEqGhujq: L$_small_initial_blocks_encrypted_sFoDGktxtpnDmhn: L$_ghash_done_efvnrtvwAsfehEC: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_efvnrtvwAsfehEC: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_efvnrtvwAsfehEC: jmp L$exit_gcm_decrypt .p2align 5 L$aes_gcm_decrypt_256_avx512: orq %r8,%r8 
je L$_enc_dec_abort_keEetjmxflGqBfv xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je L$_partial_block_done_fhsskwCeFatEtrh movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge L$_no_extra_mask_fhsskwCeFatEtrh subq %r13,%r12 L$_no_extra_mask_fhsskwCeFatEtrh: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl L$_partial_incomplete_fhsskwCeFatEtrh .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp L$_enc_dec_done_fhsskwCeFatEtrh L$_partial_incomplete_fhsskwCeFatEtrh: addl %r8d,(%rdx) movq %r8,%rax L$_enc_dec_done_fhsskwCeFatEtrh: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} L$_partial_block_done_fhsskwCeFatEtrh: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je L$_enc_dec_done_keEetjmxflGqBfv cmpq $256,%r8 jbe L$_message_below_equal_16_blocks_keEetjmxflGqBfv vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae L$_next_16_overflow_tpefFeFucnbumCh vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_tpefFeFucnbumCh L$_next_16_overflow_tpefFeFucnbumCh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_tpefFeFucnbumCh: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 
98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_iDAhziwpjqoADaj vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) L$_skip_hkeys_precomputation_iDAhziwpjqoADaj: cmpq $512,%r8 jb L$_message_below_32_blocks_keEetjmxflGqBfv cmpb $240,%r15b jae L$_next_16_overflow_qgCCeDpdgxsjtxo vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp L$_next_16_ok_qgCCeDpdgxsjtxo L$_next_16_overflow_qgCCeDpdgxsjtxo: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 L$_next_16_ok_qgCCeDpdgxsjtxo: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 
64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz L$_skip_hkeys_precomputation_ErxbfranEhsBGhe vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 
98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) L$_skip_hkeys_precomputation_ErxbfranEhsBGhe: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb L$_no_more_big_nblocks_keEetjmxflGqBfv L$_encrypt_big_nblocks_keEetjmxflGqBfv: cmpb $240,%r15b jae L$_16_blocks_overflow_budzEysnblsjtjq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_budzEysnblsjtjq L$_16_blocks_overflow_budzEysnblsjtjq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_budzEysnblsjtjq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb 
$16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) 
vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_ojhGelucjaDDiwh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_ojhGelucjaDDiwh L$_16_blocks_overflow_ojhGelucjaDDiwh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_ojhGelucjaDDiwh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_jpAfkEctagbyfkB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_jpAfkEctagbyfkB L$_16_blocks_overflow_jpAfkEctagbyfkB: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_jpAfkEctagbyfkB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 
.byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae L$_encrypt_big_nblocks_keEetjmxflGqBfv L$_no_more_big_nblocks_keEetjmxflGqBfv: cmpq $512,%r8 jae L$_encrypt_32_blocks_keEetjmxflGqBfv cmpq $256,%r8 jae L$_encrypt_16_blocks_keEetjmxflGqBfv L$_encrypt_0_blocks_ghash_32_keEetjmxflGqBfv: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CjljsCjaoxujvDg cmpl 
$8,%r10d je L$_last_num_blocks_is_8_CjljsCjaoxujvDg jb L$_last_num_blocks_is_7_1_CjljsCjaoxujvDg cmpl $12,%r10d je L$_last_num_blocks_is_12_CjljsCjaoxujvDg jb L$_last_num_blocks_is_11_9_CjljsCjaoxujvDg cmpl $15,%r10d je L$_last_num_blocks_is_15_CjljsCjaoxujvDg ja L$_last_num_blocks_is_16_CjljsCjaoxujvDg cmpl $14,%r10d je L$_last_num_blocks_is_14_CjljsCjaoxujvDg jmp L$_last_num_blocks_is_13_CjljsCjaoxujvDg L$_last_num_blocks_is_11_9_CjljsCjaoxujvDg: cmpl $10,%r10d je L$_last_num_blocks_is_10_CjljsCjaoxujvDg ja L$_last_num_blocks_is_11_CjljsCjaoxujvDg jmp L$_last_num_blocks_is_9_CjljsCjaoxujvDg L$_last_num_blocks_is_7_1_CjljsCjaoxujvDg: cmpl $4,%r10d je L$_last_num_blocks_is_4_CjljsCjaoxujvDg jb L$_last_num_blocks_is_3_1_CjljsCjaoxujvDg cmpl $6,%r10d ja L$_last_num_blocks_is_7_CjljsCjaoxujvDg je L$_last_num_blocks_is_6_CjljsCjaoxujvDg jmp L$_last_num_blocks_is_5_CjljsCjaoxujvDg L$_last_num_blocks_is_3_1_CjljsCjaoxujvDg: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CjljsCjaoxujvDg je L$_last_num_blocks_is_2_CjljsCjaoxujvDg L$_last_num_blocks_is_1_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_BpzosFahboxovuF vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_BpzosFahboxovuF L$_16_blocks_overflow_BpzosFahboxovuF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_BpzosFahboxovuF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 
%zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_dzmevElEtmlqdvB subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dzmevElEtmlqdvB L$_small_initial_partial_block_dzmevElEtmlqdvB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_dzmevElEtmlqdvB L$_small_initial_compute_done_dzmevElEtmlqdvB: L$_after_reduction_dzmevElEtmlqdvB: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_2_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_idijgbEnolbjmvb vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_idijgbEnolbjmvb L$_16_blocks_overflow_idijgbEnolbjmvb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_idijgbEnolbjmvb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 
112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zvohpFFyvnbybFD subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zvohpFFyvnbybFD L$_small_initial_partial_block_zvohpFFyvnbybFD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zvohpFFyvnbybFD: orq %r8,%r8 je L$_after_reduction_zvohpFFyvnbybFD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zvohpFFyvnbybFD: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_3_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_wghnihbAoEsnemr vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_wghnihbAoEsnemr L$_16_blocks_overflow_wghnihbAoEsnemr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_wghnihbAoEsnemr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 
vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pjzlkCCsFsjiBsp subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pjzlkCCsFsjiBsp L$_small_initial_partial_block_pjzlkCCsFsjiBsp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pjzlkCCsFsjiBsp: orq %r8,%r8 je L$_after_reduction_pjzlkCCsFsjiBsp vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pjzlkCCsFsjiBsp: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_4_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_CjzGmeouGBagvfs vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_CjzGmeouGBagvfs L$_16_blocks_overflow_CjzGmeouGBagvfs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_CjzGmeouGBagvfs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_emFtlfDdrDiyoGj subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_emFtlfDdrDiyoGj L$_small_initial_partial_block_emFtlfDdrDiyoGj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_emFtlfDdrDiyoGj: orq %r8,%r8 je L$_after_reduction_emFtlfDdrDiyoGj vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_emFtlfDdrDiyoGj: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_5_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_DgBblneEbhavoAc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_DgBblneEbhavoAc L$_16_blocks_overflow_DgBblneEbhavoAc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_DgBblneEbhavoAc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 
vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vowzfgidatEfBqr subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vowzfgidatEfBqr L$_small_initial_partial_block_vowzfgidatEfBqr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vowzfgidatEfBqr: orq %r8,%r8 je L$_after_reduction_vowzfgidatEfBqr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vowzfgidatEfBqr: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_6_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_sswuqofDefGijpp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_sswuqofDefGijpp L$_16_blocks_overflow_sswuqofDefGijpp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_sswuqofDefGijpp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb 
%zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zaEschzpbmFozoB subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zaEschzpbmFozoB L$_small_initial_partial_block_zaEschzpbmFozoB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zaEschzpbmFozoB: orq %r8,%r8 je L$_after_reduction_zaEschzpbmFozoB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zaEschzpbmFozoB: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_7_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_lncoDbxzFvwogbC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_lncoDbxzFvwogbC L$_16_blocks_overflow_lncoDbxzFvwogbC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_lncoDbxzFvwogbC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mbqrkjfyrCjFtkC subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mbqrkjfyrCjFtkC 
L$_small_initial_partial_block_mbqrkjfyrCjFtkC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mbqrkjfyrCjFtkC: orq %r8,%r8 je L$_after_reduction_mbqrkjfyrCjFtkC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_mbqrkjfyrCjFtkC: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_8_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_ExCdtGvwDseyezE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_ExCdtGvwDseyezE L$_16_blocks_overflow_ExCdtGvwDseyezE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_ExCdtGvwDseyezE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq 
$0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ajbflDwBgvpaEcE subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ajbflDwBgvpaEcE L$_small_initial_partial_block_ajbflDwBgvpaEcE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ajbflDwBgvpaEcE: orq %r8,%r8 je L$_after_reduction_ajbflDwBgvpaEcE vpxorq %xmm7,%xmm14,%xmm14 
L$_after_reduction_ajbflDwBgvpaEcE: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_9_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_qnvdfsmvntyhGuo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_qnvdfsmvntyhGuo L$_16_blocks_overflow_qnvdfsmvntyhGuo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_qnvdfsmvntyhGuo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 
98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_kduEkAyqanCoGvE subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_kduEkAyqanCoGvE L$_small_initial_partial_block_kduEkAyqanCoGvE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_kduEkAyqanCoGvE: orq %r8,%r8 je L$_after_reduction_kduEkAyqanCoGvE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_kduEkAyqanCoGvE: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_10_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_ucjmDCDgtvwsblB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_ucjmDCDgtvwsblB L$_16_blocks_overflow_ucjmDCDgtvwsblB: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_ucjmDCDgtvwsblB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 
cmpq $16,%r8 jl L$_small_initial_partial_block_grEzijkmcwkEkrv subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_grEzijkmcwkEkrv L$_small_initial_partial_block_grEzijkmcwkEkrv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_grEzijkmcwkEkrv: orq %r8,%r8 je L$_after_reduction_grEzijkmcwkEkrv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_grEzijkmcwkEkrv: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_11_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_tGfszqdtairfiAy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_tGfszqdtairfiAy L$_16_blocks_overflow_tGfszqdtairfiAy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 
L$_16_blocks_ok_tGfszqdtairfiAy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_nyiEcniDhxadvrv subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 
98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_nyiEcniDhxadvrv L$_small_initial_partial_block_nyiEcniDhxadvrv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_nyiEcniDhxadvrv: orq %r8,%r8 je L$_after_reduction_nyiEcniDhxadvrv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_nyiEcniDhxadvrv: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_12_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_GBxxxGGdrBGGAzv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_GBxxxGGdrBGGAzv L$_16_blocks_overflow_GBxxxGGdrBGGAzv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_GBxxxGGdrBGGAzv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 
vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vhoepgywGpbErsu subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 
98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vhoepgywGpbErsu L$_small_initial_partial_block_vhoepgywGpbErsu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vhoepgywGpbErsu: orq %r8,%r8 je L$_after_reduction_vhoepgywGpbErsu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vhoepgywGpbErsu: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_13_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_rFdlFzmcbwfmCFo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_rFdlFzmcbwfmCFo L$_16_blocks_overflow_rFdlFzmcbwfmCFo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_rFdlFzmcbwfmCFo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_tidvwDCqozzjufl subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tidvwDCqozzjufl L$_small_initial_partial_block_tidvwDCqozzjufl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tidvwDCqozzjufl: orq %r8,%r8 je L$_after_reduction_tidvwDCqozzjufl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tidvwDCqozzjufl: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_14_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_yDllfugovhaluis vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_yDllfugovhaluis L$_16_blocks_overflow_yDllfugovhaluis: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_yDllfugovhaluis: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 
98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vGAylAjswesdfcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_vGAylAjswesdfcA L$_small_initial_partial_block_vGAylAjswesdfcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vGAylAjswesdfcA: orq %r8,%r8 je L$_after_reduction_vGAylAjswesdfcA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_vGAylAjswesdfcA: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_15_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_pincAkEEiiwgxGh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_pincAkEEiiwgxGh L$_16_blocks_overflow_pincAkEEiiwgxGh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_pincAkEEiiwgxGh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FgsluzdCoDzfqdG subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FgsluzdCoDzfqdG L$_small_initial_partial_block_FgsluzdCoDzfqdG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 
224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FgsluzdCoDzfqdG: orq %r8,%r8 je L$_after_reduction_FgsluzdCoDzfqdG vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FgsluzdCoDzfqdG: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_16_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_dBDAoEoFjhwcanb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_dBDAoEoFjhwcanb L$_16_blocks_overflow_dBDAoEoFjhwcanb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_dBDAoEoFjhwcanb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_fdoxuvdoEsDrnFi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 
vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fdoxuvdoEsDrnFi: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_fdoxuvdoEsDrnFi: jmp L$_last_blocks_done_CjljsCjaoxujvDg L$_last_num_blocks_is_0_CjljsCjaoxujvDg: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CjljsCjaoxujvDg: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_keEetjmxflGqBfv L$_encrypt_32_blocks_keEetjmxflGqBfv: cmpb $240,%r15b jae L$_16_blocks_overflow_wovDjxgtezsaCbn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_wovDjxgtezsaCbn L$_16_blocks_overflow_wovDjxgtezsaCbn: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_wovDjxgtezsaCbn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae L$_16_blocks_overflow_qraoeizxDFojkGy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_qraoeizxDFojkGy L$_16_blocks_overflow_qraoeizxDFojkGy: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_qraoeizxDFojkGy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 
.byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 
vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_CafpBFgwEozfiCz cmpl $8,%r10d je L$_last_num_blocks_is_8_CafpBFgwEozfiCz jb L$_last_num_blocks_is_7_1_CafpBFgwEozfiCz cmpl $12,%r10d je L$_last_num_blocks_is_12_CafpBFgwEozfiCz jb L$_last_num_blocks_is_11_9_CafpBFgwEozfiCz cmpl $15,%r10d je L$_last_num_blocks_is_15_CafpBFgwEozfiCz ja L$_last_num_blocks_is_16_CafpBFgwEozfiCz cmpl $14,%r10d je L$_last_num_blocks_is_14_CafpBFgwEozfiCz jmp L$_last_num_blocks_is_13_CafpBFgwEozfiCz L$_last_num_blocks_is_11_9_CafpBFgwEozfiCz: cmpl $10,%r10d je L$_last_num_blocks_is_10_CafpBFgwEozfiCz ja L$_last_num_blocks_is_11_CafpBFgwEozfiCz jmp L$_last_num_blocks_is_9_CafpBFgwEozfiCz L$_last_num_blocks_is_7_1_CafpBFgwEozfiCz: cmpl $4,%r10d je L$_last_num_blocks_is_4_CafpBFgwEozfiCz jb L$_last_num_blocks_is_3_1_CafpBFgwEozfiCz cmpl $6,%r10d ja L$_last_num_blocks_is_7_CafpBFgwEozfiCz je L$_last_num_blocks_is_6_CafpBFgwEozfiCz jmp L$_last_num_blocks_is_5_CafpBFgwEozfiCz L$_last_num_blocks_is_3_1_CafpBFgwEozfiCz: cmpl $2,%r10d ja L$_last_num_blocks_is_3_CafpBFgwEozfiCz je L$_last_num_blocks_is_2_CafpBFgwEozfiCz L$_last_num_blocks_is_1_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_fekfutzigacvqDc vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_fekfutzigacvqDc L$_16_blocks_overflow_fekfutzigacvqDc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_fekfutzigacvqDc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 
.byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_BqCzdBwrfgovfqg subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BqCzdBwrfgovfqg L$_small_initial_partial_block_BqCzdBwrfgovfqg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_BqCzdBwrfgovfqg L$_small_initial_compute_done_BqCzdBwrfgovfqg: L$_after_reduction_BqCzdBwrfgovfqg: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_2_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae 
L$_16_blocks_overflow_zEwleqntmDxAeyd vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_zEwleqntmDxAeyd L$_16_blocks_overflow_zEwleqntmDxAeyd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_zEwleqntmDxAeyd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_eohifjbpuerrzyg subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_eohifjbpuerrzyg L$_small_initial_partial_block_eohifjbpuerrzyg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 
98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_eohifjbpuerrzyg: orq %r8,%r8 je L$_after_reduction_eohifjbpuerrzyg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_eohifjbpuerrzyg: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_3_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_acnffEtijrEjnxo vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_acnffEtijrEjnxo L$_16_blocks_overflow_acnffEtijrEjnxo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_acnffEtijrEjnxo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 
80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_doyzohBGtCkjnqc subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_doyzohBGtCkjnqc L$_small_initial_partial_block_doyzohBGtCkjnqc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_doyzohBGtCkjnqc: orq %r8,%r8 je L$_after_reduction_doyzohBGtCkjnqc vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_doyzohBGtCkjnqc: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_4_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_uGhvhwlitDofjoE vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_uGhvhwlitDofjoE L$_16_blocks_overflow_uGhvhwlitDofjoE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_uGhvhwlitDofjoE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_CsCwmBEowahhzih subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_CsCwmBEowahhzih L$_small_initial_partial_block_CsCwmBEowahhzih: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_CsCwmBEowahhzih: orq %r8,%r8 je L$_after_reduction_CsCwmBEowahhzih vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_CsCwmBEowahhzih: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_5_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_BwnlahcxoBDAelu vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_BwnlahcxoBDAelu L$_16_blocks_overflow_BwnlahcxoBDAelu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_BwnlahcxoBDAelu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rBivbBgEnqzuoau subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq 
%zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rBivbBgEnqzuoau L$_small_initial_partial_block_rBivbBgEnqzuoau: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rBivbBgEnqzuoau: orq %r8,%r8 je L$_after_reduction_rBivbBgEnqzuoau vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rBivbBgEnqzuoau: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_6_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_ymfljrqweowoCvG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_ymfljrqweowoCvG L$_16_blocks_overflow_ymfljrqweowoCvG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_ymfljrqweowoCvG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 
96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_Dlbqsuajgnhvlny subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Dlbqsuajgnhvlny L$_small_initial_partial_block_Dlbqsuajgnhvlny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Dlbqsuajgnhvlny: orq %r8,%r8 je L$_after_reduction_Dlbqsuajgnhvlny vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Dlbqsuajgnhvlny: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_7_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_ijxrtlxzmzgCbiE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_ijxrtlxzmzgCbiE L$_16_blocks_overflow_ijxrtlxzmzgCbiE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_ijxrtlxzmzgCbiE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 
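/*
 * Editor's note (assumption, inferred from the visible instruction pattern):
 * tail of the 7-block case. The data blocks consumed above are byte-reflected
 * with the shuffle mask held in %zmm29, a matching slice of the precomputed
 * GHASH key powers is loaded via %r10, and the carry-less products are folded
 * into the running hash state ahead of the POLY2 reduction that follows.
 */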
vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_Fpgnkfiyboaddsm subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Fpgnkfiyboaddsm L$_small_initial_partial_block_Fpgnkfiyboaddsm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Fpgnkfiyboaddsm: orq %r8,%r8 je L$_after_reduction_Fpgnkfiyboaddsm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Fpgnkfiyboaddsm: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_8_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_zzfxscwhyoakGqc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_zzfxscwhyoakGqc L$_16_blocks_overflow_zzfxscwhyoakGqc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_zzfxscwhyoakGqc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 
98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xBdugFFrnyriCBE subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xBdugFFrnyriCBE 
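/*
 * Editor's note (assumption, inferred from the visible instruction pattern):
 * the L$_small_initial_partial_block_* path below is taken when the remaining
 * length does not end on a 16-byte boundary. The leftover byte count is written
 * out through %rdx, the last block is stashed at 16(%rsi) for partial-block
 * bookkeeping, and the blocks handled so far are multiplied against the stored
 * key powers and reduced modulo the GHASH field polynomial (POLY2) into %xmm14.
 */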
L$_small_initial_partial_block_xBdugFFrnyriCBE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xBdugFFrnyriCBE: orq %r8,%r8 je L$_after_reduction_xBdugFFrnyriCBE vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xBdugFFrnyriCBE: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_9_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_hswtkcnEneBfnil vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_hswtkcnEneBfnil L$_16_blocks_overflow_hswtkcnEneBfnil: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_hswtkcnEneBfnil: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_lgbbvgiAttomlsy subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_lgbbvgiAttomlsy L$_small_initial_partial_block_lgbbvgiAttomlsy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq 
%zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_lgbbvgiAttomlsy: orq %r8,%r8 je L$_after_reduction_lgbbvgiAttomlsy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_lgbbvgiAttomlsy: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_10_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_EBzDixsnrGlAsGi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_EBzDixsnrGlAsGi L$_16_blocks_overflow_EBzDixsnrGlAsGi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_EBzDixsnrGlAsGi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xdsDxBzahxmzysb subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xdsDxBzahxmzysb L$_small_initial_partial_block_xdsDxBzahxmzysb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xdsDxBzahxmzysb: orq %r8,%r8 je L$_after_reduction_xdsDxBzahxmzysb vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xdsDxBzahxmzysb: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_11_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_qyEwjvzrfEfrwlG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_qyEwjvzrfEfrwlG L$_16_blocks_overflow_qyEwjvzrfEfrwlG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_qyEwjvzrfEfrwlG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 
176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cmfyhuncjqoAhuh subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cmfyhuncjqoAhuh L$_small_initial_partial_block_cmfyhuncjqoAhuh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cmfyhuncjqoAhuh: orq %r8,%r8 je L$_after_reduction_cmfyhuncjqoAhuh vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cmfyhuncjqoAhuh: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_12_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_bcstjouersAefmz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_bcstjouersAefmz L$_16_blocks_overflow_bcstjouersAefmz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_bcstjouersAefmz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 
208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iFhieggherswFAm subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iFhieggherswFAm L$_small_initial_partial_block_iFhieggherswFAm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iFhieggherswFAm: orq %r8,%r8 je 
L$_after_reduction_iFhieggherswFAm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iFhieggherswFAm: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_13_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_rymwDrficveEDaj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_rymwDrficveEDaj L$_16_blocks_overflow_rymwDrficveEDaj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_rymwDrficveEDaj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_aevCxqqBBnzfjmB subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aevCxqqBBnzfjmB L$_small_initial_partial_block_aevCxqqBBnzfjmB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aevCxqqBBnzfjmB: orq %r8,%r8 je L$_after_reduction_aevCxqqBBnzfjmB vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aevCxqqBBnzfjmB: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_14_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_kzfnwbigglfewrl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_kzfnwbigglfewrl L$_16_blocks_overflow_kzfnwbigglfewrl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_kzfnwbigglfewrl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_BirmupqDcbxwtda subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_BirmupqDcbxwtda L$_small_initial_partial_block_BirmupqDcbxwtda: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 
.byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_BirmupqDcbxwtda: orq %r8,%r8 je L$_after_reduction_BirmupqDcbxwtda vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_BirmupqDcbxwtda: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_15_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_zpEbDAveGDqklle vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_zpEbDAveGDqklle L$_16_blocks_overflow_zpEbDAveGDqklle: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_zpEbDAveGDqklle: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 
vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_Djympovkdexblck subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_Djympovkdexblck L$_small_initial_partial_block_Djympovkdexblck: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_Djympovkdexblck: orq %r8,%r8 je L$_after_reduction_Djympovkdexblck vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_Djympovkdexblck: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_16_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_bjFGibBdktCEryt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_bjFGibBdktCEryt L$_16_blocks_overflow_bjFGibBdktCEryt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_bjFGibBdktCEryt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_pBmCpEpokBigCud: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 
98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pBmCpEpokBigCud: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_pBmCpEpokBigCud: jmp L$_last_blocks_done_CafpBFgwEozfiCz L$_last_num_blocks_is_0_CafpBFgwEozfiCz: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_CafpBFgwEozfiCz: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_keEetjmxflGqBfv L$_encrypt_16_blocks_keEetjmxflGqBfv: cmpb $240,%r15b jae L$_16_blocks_overflow_wmtckzeadccoCgk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_wmtckzeadccoCgk L$_16_blocks_overflow_wmtckzeadccoCgk: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_wmtckzeadccoCgk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 
vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 
%zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_adnhFzpjBkEnjlu cmpl $8,%r10d je L$_last_num_blocks_is_8_adnhFzpjBkEnjlu jb L$_last_num_blocks_is_7_1_adnhFzpjBkEnjlu cmpl $12,%r10d je L$_last_num_blocks_is_12_adnhFzpjBkEnjlu jb L$_last_num_blocks_is_11_9_adnhFzpjBkEnjlu cmpl $15,%r10d je L$_last_num_blocks_is_15_adnhFzpjBkEnjlu ja L$_last_num_blocks_is_16_adnhFzpjBkEnjlu cmpl $14,%r10d je L$_last_num_blocks_is_14_adnhFzpjBkEnjlu jmp L$_last_num_blocks_is_13_adnhFzpjBkEnjlu L$_last_num_blocks_is_11_9_adnhFzpjBkEnjlu: cmpl $10,%r10d je L$_last_num_blocks_is_10_adnhFzpjBkEnjlu ja L$_last_num_blocks_is_11_adnhFzpjBkEnjlu jmp L$_last_num_blocks_is_9_adnhFzpjBkEnjlu L$_last_num_blocks_is_7_1_adnhFzpjBkEnjlu: cmpl $4,%r10d je L$_last_num_blocks_is_4_adnhFzpjBkEnjlu jb L$_last_num_blocks_is_3_1_adnhFzpjBkEnjlu cmpl $6,%r10d ja L$_last_num_blocks_is_7_adnhFzpjBkEnjlu je L$_last_num_blocks_is_6_adnhFzpjBkEnjlu jmp L$_last_num_blocks_is_5_adnhFzpjBkEnjlu L$_last_num_blocks_is_3_1_adnhFzpjBkEnjlu: cmpl $2,%r10d ja L$_last_num_blocks_is_3_adnhFzpjBkEnjlu je L$_last_num_blocks_is_2_adnhFzpjBkEnjlu L$_last_num_blocks_is_1_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_GsxmuksbpmpGjAF vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_GsxmuksbpmpGjAF L$_16_blocks_overflow_GsxmuksbpmpGjAF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_GsxmuksbpmpGjAF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 
vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_fkgElsvknyCFraE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_fkgElsvknyCFraE L$_small_initial_partial_block_fkgElsvknyCFraE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_fkgElsvknyCFraE L$_small_initial_compute_done_fkgElsvknyCFraE: L$_after_reduction_fkgElsvknyCFraE: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_2_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_EjdqvCnEusieimt vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_EjdqvCnEusieimt L$_16_blocks_overflow_EjdqvCnEusieimt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_EjdqvCnEusieimt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 
16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rouAoqaCpdDxjzF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rouAoqaCpdDxjzF 
L$_small_initial_partial_block_rouAoqaCpdDxjzF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rouAoqaCpdDxjzF: orq %r8,%r8 je L$_after_reduction_rouAoqaCpdDxjzF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_rouAoqaCpdDxjzF: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_3_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_uctbCqtlugkklDD vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_uctbCqtlugkklDD L$_16_blocks_overflow_uctbCqtlugkklDD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_uctbCqtlugkklDD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 
.byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zkAAeakisCCFqgf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zkAAeakisCCFqgf L$_small_initial_partial_block_zkAAeakisCCFqgf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zkAAeakisCCFqgf: orq %r8,%r8 je L$_after_reduction_zkAAeakisCCFqgf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zkAAeakisCCFqgf: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_4_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_gaqeqvovBwleCnk vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_gaqeqvovBwleCnk L$_16_blocks_overflow_gaqeqvovBwleCnk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_gaqeqvovBwleCnk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 
1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_qihCqAlqxdsjyzm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_qihCqAlqxdsjyzm L$_small_initial_partial_block_qihCqAlqxdsjyzm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 
98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_qihCqAlqxdsjyzm: orq %r8,%r8 je L$_after_reduction_qihCqAlqxdsjyzm vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_qihCqAlqxdsjyzm: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_5_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_ocpzeCAdEaCuwqG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_ocpzeCAdEaCuwqG L$_16_blocks_overflow_ocpzeCAdEaCuwqG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_ocpzeCAdEaCuwqG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 
vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GgfcCeubxmwGabf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GgfcCeubxmwGabf L$_small_initial_partial_block_GgfcCeubxmwGabf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GgfcCeubxmwGabf: orq %r8,%r8 je L$_after_reduction_GgfcCeubxmwGabf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GgfcCeubxmwGabf: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_6_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_tCpEhfGhdbguevv vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_tCpEhfGhdbguevv L$_16_blocks_overflow_tCpEhfGhdbguevv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_tCpEhfGhdbguevv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb 
%ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EcehrEgDvGgGxlr subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EcehrEgDvGgGxlr L$_small_initial_partial_block_EcehrEgDvGgGxlr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EcehrEgDvGgGxlr: orq %r8,%r8 je L$_after_reduction_EcehrEgDvGgGxlr vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EcehrEgDvGgGxlr: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_7_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_yhnxntsqCvqmnAv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_yhnxntsqCvqmnAv L$_16_blocks_overflow_yhnxntsqCvqmnAv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_yhnxntsqCvqmnAv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 
1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dpnDdmEjpiBlsff subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dpnDdmEjpiBlsff L$_small_initial_partial_block_dpnDdmEjpiBlsff: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dpnDdmEjpiBlsff: orq %r8,%r8 je L$_after_reduction_dpnDdmEjpiBlsff vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dpnDdmEjpiBlsff: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_8_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_qhecknjsAigbdvl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_qhecknjsAigbdvl L$_16_blocks_overflow_qhecknjsAigbdvl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_qhecknjsAigbdvl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_agskGinasntEiCl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_agskGinasntEiCl L$_small_initial_partial_block_agskGinasntEiCl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 
208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_agskGinasntEiCl: orq %r8,%r8 je L$_after_reduction_agskGinasntEiCl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_agskGinasntEiCl: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_9_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_bFfGEAqbwowecqr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_bFfGEAqbwowecqr L$_16_blocks_overflow_bFfGEAqbwowecqr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_bFfGEAqbwowecqr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xuljsjGkGjfAtFa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xuljsjGkGjfAtFa L$_small_initial_partial_block_xuljsjGkGjfAtFa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 
98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xuljsjGkGjfAtFa: orq %r8,%r8 je L$_after_reduction_xuljsjGkGjfAtFa vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xuljsjGkGjfAtFa: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_10_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_rvpoAkotkmdfoGD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_rvpoAkotkmdfoGD L$_16_blocks_overflow_rvpoAkotkmdfoGD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_rvpoAkotkmdfoGD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_xvxthCnBgzxznFe subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_xvxthCnBgzxznFe L$_small_initial_partial_block_xvxthCnBgzxznFe: movl %r8d,(%rdx) vmovdqu64 
%xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xvxthCnBgzxznFe: orq %r8,%r8 je L$_after_reduction_xvxthCnBgzxznFe vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xvxthCnBgzxznFe: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_11_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_vfjpDwaAwwnfAie vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_vfjpDwaAwwnfAie L$_16_blocks_overflow_vfjpDwaAwwnfAie: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_vfjpDwaAwwnfAie: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_aBbqBjAzrxyDsyu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 
.byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_aBbqBjAzrxyDsyu L$_small_initial_partial_block_aBbqBjAzrxyDsyu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_aBbqBjAzrxyDsyu: orq %r8,%r8 je L$_after_reduction_aBbqBjAzrxyDsyu vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_aBbqBjAzrxyDsyu: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_12_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_sxuCEDavBFjsEdv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_sxuCEDavBFjsEdv L$_16_blocks_overflow_sxuCEDavBFjsEdv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_sxuCEDavBFjsEdv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 
80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yChicojCCAAFCdn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq 
%zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yChicojCCAAFCdn L$_small_initial_partial_block_yChicojCCAAFCdn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yChicojCCAAFCdn: orq %r8,%r8 je L$_after_reduction_yChicojCCAAFCdn vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yChicojCCAAFCdn: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_13_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_qqAerGvEyeduCgs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_qqAerGvEyeduCgs L$_16_blocks_overflow_qqAerGvEyeduCgs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_qqAerGvEyeduCgs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb 
%xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zkhmCnldAfcumwl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zkhmCnldAfcumwl L$_small_initial_partial_block_zkhmCnldAfcumwl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zkhmCnldAfcumwl: orq %r8,%r8 je L$_after_reduction_zkhmCnldAfcumwl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zkhmCnldAfcumwl: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_14_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_tiwfklfdCbEnvFe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_tiwfklfdCbEnvFe L$_16_blocks_overflow_tiwfklfdCbEnvFe: vpshufb %zmm29,%zmm2,%zmm2 
vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_tiwfklfdCbEnvFe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 
.byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_fFfrqpdqbcvGzmv subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_fFfrqpdqbcvGzmv L$_small_initial_partial_block_fFfrqpdqbcvGzmv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 
vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fFfrqpdqbcvGzmv: orq %r8,%r8 je L$_after_reduction_fFfrqpdqbcvGzmv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_fFfrqpdqbcvGzmv: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_15_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_BatgsGhBnhqnqnx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_BatgsGhBnhqnqnx L$_16_blocks_overflow_BatgsGhBnhqnqnx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_BatgsGhBnhqnqnx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ysbBlvhzxEdeEFl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
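# Finish collapsing the high/low GHASH halves to 128 bits before the POLY2 reduction into %xmm14.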
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ysbBlvhzxEdeEFl L$_small_initial_partial_block_ysbBlvhzxEdeEFl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ysbBlvhzxEdeEFl: orq %r8,%r8 je L$_after_reduction_ysbBlvhzxEdeEFl vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_ysbBlvhzxEdeEFl: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_16_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_CuxvqEazAfGjsCp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_CuxvqEazAfGjsCp L$_16_blocks_overflow_CuxvqEazAfGjsCp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_CuxvqEazAfGjsCp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb 
%zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_cGvBxlvhpkhxlhv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cGvBxlvhpkhxlhv: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_cGvBxlvhpkhxlhv: jmp L$_last_blocks_done_adnhFzpjBkEnjlu L$_last_num_blocks_is_0_adnhFzpjBkEnjlu: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_adnhFzpjBkEnjlu: vpshufb %xmm29,%xmm2,%xmm2 jmp 
L$_ghash_done_keEetjmxflGqBfv L$_message_below_32_blocks_keEetjmxflGqBfv: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz L$_skip_hkeys_precomputation_wDAhpcxxDdecsFn vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) L$_skip_hkeys_precomputation_wDAhpcxxDdecsFn: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je L$_last_num_blocks_is_0_uGbbotznadbtwnB cmpl $8,%r10d je L$_last_num_blocks_is_8_uGbbotznadbtwnB jb L$_last_num_blocks_is_7_1_uGbbotznadbtwnB cmpl $12,%r10d je L$_last_num_blocks_is_12_uGbbotznadbtwnB jb L$_last_num_blocks_is_11_9_uGbbotznadbtwnB cmpl $15,%r10d je L$_last_num_blocks_is_15_uGbbotznadbtwnB ja L$_last_num_blocks_is_16_uGbbotznadbtwnB cmpl $14,%r10d je L$_last_num_blocks_is_14_uGbbotznadbtwnB jmp L$_last_num_blocks_is_13_uGbbotznadbtwnB L$_last_num_blocks_is_11_9_uGbbotznadbtwnB: cmpl $10,%r10d je L$_last_num_blocks_is_10_uGbbotznadbtwnB ja L$_last_num_blocks_is_11_uGbbotznadbtwnB jmp L$_last_num_blocks_is_9_uGbbotznadbtwnB L$_last_num_blocks_is_7_1_uGbbotznadbtwnB: cmpl $4,%r10d je L$_last_num_blocks_is_4_uGbbotznadbtwnB jb L$_last_num_blocks_is_3_1_uGbbotznadbtwnB cmpl $6,%r10d ja L$_last_num_blocks_is_7_uGbbotznadbtwnB je L$_last_num_blocks_is_6_uGbbotznadbtwnB jmp L$_last_num_blocks_is_5_uGbbotznadbtwnB L$_last_num_blocks_is_3_1_uGbbotznadbtwnB: cmpl $2,%r10d ja L$_last_num_blocks_is_3_uGbbotznadbtwnB je 
L$_last_num_blocks_is_2_uGbbotznadbtwnB L$_last_num_blocks_is_1_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae L$_16_blocks_overflow_cChwjnmCkfzrqax vpaddd %xmm28,%xmm2,%xmm0 jmp L$_16_blocks_ok_cChwjnmCkfzrqax L$_16_blocks_overflow_cChwjnmCkfzrqax: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 L$_16_blocks_ok_cChwjnmCkfzrqax: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_EctpihchBbzjuhh subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
L$_small_initial_compute_done_EctpihchBbzjuhh L$_small_initial_partial_block_EctpihchBbzjuhh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp L$_after_reduction_EctpihchBbzjuhh L$_small_initial_compute_done_EctpihchBbzjuhh: L$_after_reduction_EctpihchBbzjuhh: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_2_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae L$_16_blocks_overflow_DqmtBvrcgAqmgdw vpaddd %ymm28,%ymm2,%ymm0 jmp L$_16_blocks_ok_DqmtBvrcgAqmgdw L$_16_blocks_overflow_DqmtBvrcgAqmgdw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 L$_16_blocks_ok_DqmtBvrcgAqmgdw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl 
L$_small_initial_partial_block_EyrkoajdjakxzkF subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EyrkoajdjakxzkF L$_small_initial_partial_block_EyrkoajdjakxzkF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EyrkoajdjakxzkF: orq %r8,%r8 je L$_after_reduction_EyrkoajdjakxzkF vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EyrkoajdjakxzkF: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_3_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae L$_16_blocks_overflow_hfDuCGGGEpbgAAo vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_hfDuCGGGEpbgAAo L$_16_blocks_overflow_hfDuCGGGEpbgAAo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_hfDuCGGGEpbgAAo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GfsyFzqqokxFwFx subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GfsyFzqqokxFwFx L$_small_initial_partial_block_GfsyFzqqokxFwFx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GfsyFzqqokxFwFx: orq %r8,%r8 je L$_after_reduction_GfsyFzqqokxFwFx vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_GfsyFzqqokxFwFx: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_4_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae L$_16_blocks_overflow_wxaujbwbDrFxuhe vpaddd %zmm28,%zmm2,%zmm0 jmp L$_16_blocks_ok_wxaujbwbDrFxuhe L$_16_blocks_overflow_wxaujbwbDrFxuhe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 
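# Counter-overflow path: the big-endian counter blocks were byte-swapped, incremented as little-endian dwords, and are swapped back below before encryption.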
vpshufb %zmm29,%zmm0,%zmm0 L$_16_blocks_ok_wxaujbwbDrFxuhe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_tpadbsuBdepEgig subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_tpadbsuBdepEgig L$_small_initial_partial_block_tpadbsuBdepEgig: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 
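# Fold the running GHASH accumulators (%zmm24/%zmm25/%zmm26) into the fresh products before the final horizontal fold and POLY2 reduction.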
vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_tpadbsuBdepEgig: orq %r8,%r8 je L$_after_reduction_tpadbsuBdepEgig vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_tpadbsuBdepEgig: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_5_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae L$_16_blocks_overflow_tEuoxeaCCDdhEFB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp L$_16_blocks_ok_tEuoxeaCCDdhEFB L$_16_blocks_overflow_tEuoxeaCCDdhEFB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 L$_16_blocks_ok_tEuoxeaCCDdhEFB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 
208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_srvwxdEwmxFwfhg subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_srvwxdEwmxFwfhg L$_small_initial_partial_block_srvwxdEwmxFwfhg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_srvwxdEwmxFwfhg: orq %r8,%r8 je L$_after_reduction_srvwxdEwmxFwfhg vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_srvwxdEwmxFwfhg: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_6_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae L$_16_blocks_overflow_prosxFkubabgvzg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp L$_16_blocks_ok_prosxFkubabgvzg L$_16_blocks_overflow_prosxFkubabgvzg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 L$_16_blocks_ok_prosxFkubabgvzg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 
vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hzkBexFaxhsbChs subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hzkBexFaxhsbChs L$_small_initial_partial_block_hzkBexFaxhsbChs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hzkBexFaxhsbChs: orq %r8,%r8 je L$_after_reduction_hzkBexFaxhsbChs vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_hzkBexFaxhsbChs: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_7_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae L$_16_blocks_overflow_aeeqyBehlbvnfnk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_aeeqyBehlbvnfnk L$_16_blocks_overflow_aeeqyBehlbvnfnk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_aeeqyBehlbvnfnk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_AobapGtdiluagwq subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_AobapGtdiluagwq L$_small_initial_partial_block_AobapGtdiluagwq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_AobapGtdiluagwq: orq %r8,%r8 je 
L$_after_reduction_AobapGtdiluagwq vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_AobapGtdiluagwq: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_8_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae L$_16_blocks_overflow_rboylvBCxohyFxr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp L$_16_blocks_ok_rboylvBCxohyFxr L$_16_blocks_overflow_rboylvBCxohyFxr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 L$_16_blocks_ok_rboylvBCxohyFxr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_iitjdnjexGtAzlA subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 
98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_iitjdnjexGtAzlA L$_small_initial_partial_block_iitjdnjexGtAzlA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_iitjdnjexGtAzlA: orq %r8,%r8 je L$_after_reduction_iitjdnjexGtAzlA vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_iitjdnjexGtAzlA: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_9_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae L$_16_blocks_overflow_kwzbcrnlszssDoA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp L$_16_blocks_ok_kwzbcrnlszssDoA L$_16_blocks_overflow_kwzbcrnlszssDoA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 L$_16_blocks_ok_kwzbcrnlszssDoA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 
896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_EFvfhGiioywrajC subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq 
$8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_EFvfhGiioywrajC L$_small_initial_partial_block_EFvfhGiioywrajC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_EFvfhGiioywrajC: orq %r8,%r8 je L$_after_reduction_EFvfhGiioywrajC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_EFvfhGiioywrajC: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_10_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae L$_16_blocks_overflow_hrbjfpBdCjiGnfs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp L$_16_blocks_ok_hrbjfpBdCjiGnfs L$_16_blocks_overflow_hrbjfpBdCjiGnfs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 L$_16_blocks_ok_hrbjfpBdCjiGnfs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 
98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FvsGiljtuwAeGuy subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FvsGiljtuwAeGuy L$_small_initial_partial_block_FvsGiljtuwAeGuy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FvsGiljtuwAeGuy: orq %r8,%r8 je L$_after_reduction_FvsGiljtuwAeGuy vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_FvsGiljtuwAeGuy: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_11_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae L$_16_blocks_overflow_gffyuiCaEymxbgx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_gffyuiCaEymxbgx L$_16_blocks_overflow_gffyuiCaEymxbgx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_gffyuiCaEymxbgx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq 
$0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yskGpojraEjuoeD subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yskGpojraEjuoeD L$_small_initial_partial_block_yskGpojraEjuoeD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) 
vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yskGpojraEjuoeD: orq %r8,%r8 je L$_after_reduction_yskGpojraEjuoeD vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_yskGpojraEjuoeD: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_12_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae L$_16_blocks_overflow_hAjEfcezvfywBbB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp L$_16_blocks_ok_hAjEfcezvfywBbB L$_16_blocks_overflow_hAjEfcezvfywBbB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 L$_16_blocks_ok_hAjEfcezvfywBbB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 
64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_dkgezBnEGtEnaGC subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_dkgezBnEGtEnaGC L$_small_initial_partial_block_dkgezBnEGtEnaGC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_dkgezBnEGtEnaGC: orq %r8,%r8 je L$_after_reduction_dkgezBnEGtEnaGC vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_dkgezBnEGtEnaGC: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_13_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae L$_16_blocks_overflow_jsBqmgCzCrGvyyA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp L$_16_blocks_ok_jsBqmgCzCrGvyyA L$_16_blocks_overflow_jsBqmgCzCrGvyyA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 L$_16_blocks_ok_jsBqmgCzCrGvyyA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_gEgFBvntjyjbGji subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_gEgFBvntjyjbGji L$_small_initial_partial_block_gEgFBvntjyjbGji: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_gEgFBvntjyjbGji: orq %r8,%r8 je L$_after_reduction_gEgFBvntjyjbGji vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_gEgFBvntjyjbGji: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_14_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae L$_16_blocks_overflow_muGuwhaFlxCtAii vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp L$_16_blocks_ok_muGuwhaFlxCtAii L$_16_blocks_overflow_muGuwhaFlxCtAii: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 L$_16_blocks_ok_muGuwhaFlxCtAii: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zumDfmCofGawimf subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq 
$0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zumDfmCofGawimf L$_small_initial_partial_block_zumDfmCofGawimf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zumDfmCofGawimf: orq %r8,%r8 je L$_after_reduction_zumDfmCofGawimf vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_zumDfmCofGawimf: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_15_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae L$_16_blocks_overflow_EpbiipkiGBkrvEx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_EpbiipkiGBkrvEx L$_16_blocks_overflow_EpbiipkiGBkrvEx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_EpbiipkiGBkrvEx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_wohgmjgdAjDrcfv subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 
98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_wohgmjgdAjDrcfv L$_small_initial_partial_block_wohgmjgdAjDrcfv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_wohgmjgdAjDrcfv: orq %r8,%r8 je L$_after_reduction_wohgmjgdAjDrcfv vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_wohgmjgdAjDrcfv: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_16_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae L$_16_blocks_overflow_etuAklrEovqCDpq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp L$_16_blocks_ok_etuAklrEovqCDpq L$_16_blocks_overflow_etuAklrEovqCDpq: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 L$_16_blocks_ok_etuAklrEovqCDpq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 
98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_xGuCpnrvibyoyay: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_xGuCpnrvibyoyay: vpxorq %xmm7,%xmm14,%xmm14 L$_after_reduction_xGuCpnrvibyoyay: jmp L$_last_blocks_done_uGbbotznadbtwnB L$_last_num_blocks_is_0_uGbbotznadbtwnB: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq 
%ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 L$_last_blocks_done_uGbbotznadbtwnB: vpshufb %xmm29,%xmm2,%xmm2 jmp L$_ghash_done_keEetjmxflGqBfv L$_message_below_equal_16_blocks_keEetjmxflGqBfv: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je L$_small_initial_num_blocks_is_8_vkDeiBlhaznkthD jl L$_small_initial_num_blocks_is_7_1_vkDeiBlhaznkthD cmpq $12,%r12 je L$_small_initial_num_blocks_is_12_vkDeiBlhaznkthD jl L$_small_initial_num_blocks_is_11_9_vkDeiBlhaznkthD cmpq $16,%r12 je L$_small_initial_num_blocks_is_16_vkDeiBlhaznkthD cmpq $15,%r12 je L$_small_initial_num_blocks_is_15_vkDeiBlhaznkthD cmpq $14,%r12 je L$_small_initial_num_blocks_is_14_vkDeiBlhaznkthD jmp L$_small_initial_num_blocks_is_13_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_11_9_vkDeiBlhaznkthD: cmpq $11,%r12 je L$_small_initial_num_blocks_is_11_vkDeiBlhaznkthD cmpq $10,%r12 je L$_small_initial_num_blocks_is_10_vkDeiBlhaznkthD jmp L$_small_initial_num_blocks_is_9_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_7_1_vkDeiBlhaznkthD: cmpq $4,%r12 je L$_small_initial_num_blocks_is_4_vkDeiBlhaznkthD jl L$_small_initial_num_blocks_is_3_1_vkDeiBlhaznkthD cmpq $7,%r12 je L$_small_initial_num_blocks_is_7_vkDeiBlhaznkthD cmpq $6,%r12 je L$_small_initial_num_blocks_is_6_vkDeiBlhaznkthD jmp L$_small_initial_num_blocks_is_5_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_3_1_vkDeiBlhaznkthD: cmpq $3,%r12 je L$_small_initial_num_blocks_is_3_vkDeiBlhaznkthD cmpq $2,%r12 je L$_small_initial_num_blocks_is_2_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_1_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl L$_small_initial_partial_block_pelykqxdehCqvkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 
vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pelykqxdehCqvkk L$_small_initial_partial_block_pelykqxdehCqvkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp L$_after_reduction_pelykqxdehCqvkk L$_small_initial_compute_done_pelykqxdehCqvkk: L$_after_reduction_pelykqxdehCqvkk: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_2_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ldyuFtpzipDvehA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ldyuFtpzipDvehA L$_small_initial_partial_block_ldyuFtpzipDvehA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 
vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ldyuFtpzipDvehA: orq %r8,%r8 je L$_after_reduction_ldyuFtpzipDvehA vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ldyuFtpzipDvehA: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_3_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_trwwageihBqcfkh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_trwwageihBqcfkh L$_small_initial_partial_block_trwwageihBqcfkh: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 
vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_trwwageihBqcfkh: orq %r8,%r8 je L$_after_reduction_trwwageihBqcfkh vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_trwwageihBqcfkh: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_4_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_yotsdxeGEAxlmrj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_yotsdxeGEAxlmrj L$_small_initial_partial_block_yotsdxeGEAxlmrj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 
vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_yotsdxeGEAxlmrj: orq %r8,%r8 je L$_after_reduction_yotsdxeGEAxlmrj vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_yotsdxeGEAxlmrj: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_5_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_rmwxtkgdnBhEnAk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 
vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_rmwxtkgdnBhEnAk L$_small_initial_partial_block_rmwxtkgdnBhEnAk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_rmwxtkgdnBhEnAk: orq %r8,%r8 je L$_after_reduction_rmwxtkgdnBhEnAk vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_rmwxtkgdnBhEnAk: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_6_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_hGvwkbDFDGzDyAp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 
98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_hGvwkbDFDGzDyAp L$_small_initial_partial_block_hGvwkbDFDGzDyAp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_hGvwkbDFDGzDyAp: orq %r8,%r8 je L$_after_reduction_hGvwkbDFDGzDyAp vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_hGvwkbDFDGzDyAp: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_7_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 
160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_FchyqAlDxAtkgym subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_FchyqAlDxAtkgym L$_small_initial_partial_block_FchyqAlDxAtkgym: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_FchyqAlDxAtkgym: orq %r8,%r8 je L$_after_reduction_FchyqAlDxAtkgym vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_FchyqAlDxAtkgym: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_8_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 
vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_ChlBCihfFcxfpre subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_ChlBCihfFcxfpre L$_small_initial_partial_block_ChlBCihfFcxfpre: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 
vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_ChlBCihfFcxfpre: orq %r8,%r8 je L$_after_reduction_ChlBCihfFcxfpre vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_ChlBCihfFcxfpre: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_9_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_trxojfuEtotExGB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 
98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_trxojfuEtotExGB L$_small_initial_partial_block_trxojfuEtotExGB: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_trxojfuEtotExGB: orq %r8,%r8 je L$_after_reduction_trxojfuEtotExGB vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_trxojfuEtotExGB: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_10_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_zdivCCwEFvrsaiu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_zdivCCwEFvrsaiu L$_small_initial_partial_block_zdivCCwEFvrsaiu: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 
240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_zdivCCwEFvrsaiu: orq %r8,%r8 je L$_after_reduction_zdivCCwEFvrsaiu vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_zdivCCwEFvrsaiu: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_11_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} 
vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_cbByewwahwBzpzx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_cbByewwahwBzpzx L$_small_initial_partial_block_cbByewwahwBzpzx: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_cbByewwahwBzpzx: orq %r8,%r8 je L$_after_reduction_cbByewwahwBzpzx vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_cbByewwahwBzpzx: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_12_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 
vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_vpiEDoFuFgdvCsg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
L$_small_initial_compute_done_vpiEDoFuFgdvCsg L$_small_initial_partial_block_vpiEDoFuFgdvCsg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_vpiEDoFuFgdvCsg: orq %r8,%r8 je L$_after_reduction_vpiEDoFuFgdvCsg vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_vpiEDoFuFgdvCsg: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_13_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 
vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_pkeazcEqwkcpavG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_pkeazcEqwkcpavG L$_small_initial_partial_block_pkeazcEqwkcpavG: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 
vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_pkeazcEqwkcpavG: orq %r8,%r8 je L$_after_reduction_pkeazcEqwkcpavG vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_pkeazcEqwkcpavG: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_14_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 
98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_GsuCukqqbwGpxDi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_GsuCukqqbwGpxDi L$_small_initial_partial_block_GsuCukqqbwGpxDi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_GsuCukqqbwGpxDi: orq %r8,%r8 je L$_after_reduction_GsuCukqqbwGpxDi vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_GsuCukqqbwGpxDi: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_15_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 
subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl L$_small_initial_partial_block_mbxlopCmuqdpqjz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp L$_small_initial_compute_done_mbxlopCmuqdpqjz L$_small_initial_partial_block_mbxlopCmuqdpqjz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_mbxlopCmuqdpqjz: orq %r8,%r8 je L$_after_reduction_mbxlopCmuqdpqjz vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_mbxlopCmuqdpqjz: jmp L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD L$_small_initial_num_blocks_is_16_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd 
ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 L$_small_initial_partial_block_fpGgFAenBuAyutw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 
98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 L$_small_initial_compute_done_fpGgFAenBuAyutw: vpxorq %xmm13,%xmm14,%xmm14 L$_after_reduction_fpGgFAenBuAyutw: L$_small_initial_blocks_encrypted_vkDeiBlhaznkthD: L$_ghash_done_keEetjmxflGqBfv: vmovdqu64 %xmm2,0(%rsi) L$_enc_dec_done_keEetjmxflGqBfv: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) L$_enc_dec_abort_keEetjmxflGqBfv: jmp L$exit_gcm_decrypt L$exit_gcm_decrypt: cmpq $256,%r8 jbe L$skip_hkeys_cleanup_byhoEGxnfawfFqd vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) L$skip_hkeys_cleanup_byhoEGxnfawfFqd: vzeroupper leaq (%rbp),%rsp popq %r15 popq %r14 popq %r13 popq %r12 popq %rbp popq %rbx .byte 0xf3,0xc3 L$decrypt_seh_end: .section __DATA,__const .p2align 4 POLY:.quad 0x0000000000000001, 0xC200000000000000 .p2align 6 POLY2: .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .p2align 4 TWOONE:.quad 0x0000000000000001, 0x0000000100000000 .p2align 6 SHUF_MASK: .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .p2align 4 SHIFT_MASK: .quad 0x0706050403020100, 0x0f0e0d0c0b0a0908 ALL_F: .quad 0xffffffffffffffff, 0xffffffffffffffff ZERO: .quad 0x0000000000000000, 0x0000000000000000 .p2align 4 ONEa: .quad 0x0000000000000001, 0x0000000000000000 .p2align 4 ONEf: .quad 0x0000000000000000, 0x0100000000000000 .p2align 6 ddq_add_1234: .quad 0x0000000000000001, 0x0000000000000000 .quad 0x0000000000000002, 0x0000000000000000 .quad 0x0000000000000003, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .p2align 6 ddq_add_5678: .quad 0x0000000000000005, 0x0000000000000000 .quad 0x0000000000000006, 0x0000000000000000 .quad 0x0000000000000007, 0x0000000000000000 .quad 0x0000000000000008, 0x0000000000000000 .p2align 6 ddq_add_4444: .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .p2align 6 ddq_add_8888: .quad 0x0000000000000008, 
0x0000000000000000
.quad 0x0000000000000008, 0x0000000000000000
.quad 0x0000000000000008, 0x0000000000000000
.quad 0x0000000000000008, 0x0000000000000000
.p2align 6
ddq_addbe_1234:
.quad 0x0000000000000000, 0x0100000000000000
.quad 0x0000000000000000, 0x0200000000000000
.quad 0x0000000000000000, 0x0300000000000000
.quad 0x0000000000000000, 0x0400000000000000
.p2align 6
ddq_addbe_4444:
.quad 0x0000000000000000, 0x0400000000000000
.quad 0x0000000000000000, 0x0400000000000000
.quad 0x0000000000000000, 0x0400000000000000
.quad 0x0000000000000000, 0x0400000000000000
.p2align 6
byte_len_to_mask_table:
.value 0x0000, 0x0001, 0x0003, 0x0007
.value 0x000f, 0x001f, 0x003f, 0x007f
.value 0x00ff, 0x01ff, 0x03ff, 0x07ff
.value 0x0fff, 0x1fff, 0x3fff, 0x7fff
.value 0xffff
.p2align 6
byte64_len_to_mask_table:
.quad 0x0000000000000000, 0x0000000000000001
.quad 0x0000000000000003, 0x0000000000000007
.quad 0x000000000000000f, 0x000000000000001f
.quad 0x000000000000003f, 0x000000000000007f
.quad 0x00000000000000ff, 0x00000000000001ff
.quad 0x00000000000003ff, 0x00000000000007ff
.quad 0x0000000000000fff, 0x0000000000001fff
.quad 0x0000000000003fff, 0x0000000000007fff
.quad 0x000000000000ffff, 0x000000000001ffff
.quad 0x000000000003ffff, 0x000000000007ffff
.quad 0x00000000000fffff, 0x00000000001fffff
.quad 0x00000000003fffff, 0x00000000007fffff
.quad 0x0000000000ffffff, 0x0000000001ffffff
.quad 0x0000000003ffffff, 0x0000000007ffffff
.quad 0x000000000fffffff, 0x000000001fffffff
.quad 0x000000003fffffff, 0x000000007fffffff
.quad 0x00000000ffffffff, 0x00000001ffffffff
.quad 0x00000003ffffffff, 0x00000007ffffffff
.quad 0x0000000fffffffff, 0x0000001fffffffff
.quad 0x0000003fffffffff, 0x0000007fffffffff
.quad 0x000000ffffffffff, 0x000001ffffffffff
.quad 0x000003ffffffffff, 0x000007ffffffffff
.quad 0x00000fffffffffff, 0x00001fffffffffff
.quad 0x00003fffffffffff, 0x00007fffffffffff
.quad 0x0000ffffffffffff, 0x0001ffffffffffff
.quad 0x0003ffffffffffff, 0x0007ffffffffffff
.quad 0x000fffffffffffff, 0x001fffffffffffff
.quad 0x003fffffffffffff, 0x007fffffffffffff
.quad 0x00ffffffffffffff, 0x01ffffffffffffff
.quad 0x03ffffffffffffff, 0x07ffffffffffffff
.quad 0x0fffffffffffffff, 0x1fffffffffffffff
.quad 0x3fffffffffffffff, 0x7fffffffffffffff
.quad 0xffffffffffffffff
.text
#endif
#endif
marvin-hansen/iggy-streaming-system
5,159
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _gcm_gmult_ssse3 .private_extern _gcm_gmult_ssse3 .p2align 4 _gcm_gmult_ssse3: _CET_ENDBR movdqu (%rdi),%xmm0 movdqa L$reverse_bytes(%rip),%xmm10 movdqa L$low4_mask(%rip),%xmm2 .byte 102,65,15,56,0,194 movdqa %xmm2,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm2,%xmm0 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax L$oop_row_1: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_1 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax L$oop_row_2: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_2 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $6,%rax L$oop_row_3: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 .byte 102,65,15,56,0,210 movdqu %xmm2,(%rdi) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .globl _gcm_ghash_ssse3 .private_extern _gcm_ghash_ssse3 .p2align 4 _gcm_ghash_ssse3: _CET_ENDBR movdqu (%rdi),%xmm0 movdqa L$reverse_bytes(%rip),%xmm10 movdqa L$low4_mask(%rip),%xmm11 andq $-16,%rcx .byte 102,65,15,56,0,194 pxor %xmm3,%xmm3 L$oop_ghash: movdqu (%rdx),%xmm1 .byte 102,65,15,56,0,202 pxor %xmm1,%xmm0 movdqa %xmm11,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm11,%xmm0 pxor %xmm2,%xmm2 movq $5,%rax L$oop_row_4: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_4 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax L$oop_row_5: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq 
$8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_5 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $6,%rax L$oop_row_6: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz L$oop_row_6 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movdqa %xmm2,%xmm0 leaq -256(%rsi),%rsi leaq 16(%rdx),%rdx subq $16,%rcx jnz L$oop_ghash .byte 102,65,15,56,0,194 movdqu %xmm0,(%rdi) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .section __DATA,__const .p2align 4 L$reverse_bytes: .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 L$low4_mask: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f .text #endif
marvin-hansen/iggy-streaming-system
12,480
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/md5-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .p2align 4 .globl _md5_block_asm_data_order .private_extern _md5_block_asm_data_order _md5_block_asm_data_order: _CET_ENDBR pushq %rbp pushq %rbx pushq %r12 pushq %r14 pushq %r15 L$prologue: movq %rdi,%rbp shlq $6,%rdx leaq (%rsi,%rdx,1),%rdi movl 0(%rbp),%eax movl 4(%rbp),%ebx movl 8(%rbp),%ecx movl 12(%rbp),%edx cmpq %rdi,%rsi je L$end L$loop: movl %eax,%r8d movl %ebx,%r9d movl %ecx,%r14d movl %edx,%r15d movl 0(%rsi),%r10d movl %edx,%r11d xorl %ecx,%r11d leal -680876936(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 4(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -389564586(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 8(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal 606105819(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 12(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -1044525330(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 16(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal -176418897(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 20(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal 1200080426(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 24(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -1473231341(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 28(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -45705983(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 32(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal 1770035416(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 36(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -1958414417(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 40(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -42063(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 44(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -1990404162(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 48(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal 1804603682(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 52(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -40341101(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 56(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -1502002290(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 60(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal 1236535329(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 0(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx movl 4(%rsi),%r10d movl %edx,%r11d movl %edx,%r12d notl %r11d leal -165796510(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 24(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal 
-1069501632(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 44(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal 643717713(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 0(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -373897302(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 20(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal -701558691(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 40(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal 38016083(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 60(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal -660478335(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 16(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -405537848(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 36(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal 568446438(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 56(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal -1019803690(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 12(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal -187363961(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 32(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal 1163531501(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 52(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal -1444681467(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 8(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal -51403784(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 28(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal 1735328473(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 48(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -1926607734(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 0(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx movl 20(%rsi),%r10d movl %ecx,%r11d leal -378558(%rax,%r10,1),%eax movl 32(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -2022574463(%rdx,%r10,1),%edx movl 44(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal 1839030562(%rcx,%r10,1),%ecx movl 56(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal -35309556(%rbx,%r10,1),%ebx movl 4(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal -1530992060(%rax,%r10,1),%eax movl 16(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll 
$4,%eax movl %ebx,%r11d addl %ebx,%eax leal 1272893353(%rdx,%r10,1),%edx movl 28(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal -155497632(%rcx,%r10,1),%ecx movl 40(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal -1094730640(%rbx,%r10,1),%ebx movl 52(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal 681279174(%rax,%r10,1),%eax movl 0(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -358537222(%rdx,%r10,1),%edx movl 12(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal -722521979(%rcx,%r10,1),%ecx movl 24(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal 76029189(%rbx,%r10,1),%ebx movl 36(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal -640364487(%rax,%r10,1),%eax movl 48(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -421815835(%rdx,%r10,1),%edx movl 60(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal 530742520(%rcx,%r10,1),%ecx movl 8(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal -995338651(%rbx,%r10,1),%ebx movl 0(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx movl 0(%rsi),%r10d movl $0xffffffff,%r11d xorl %edx,%r11d leal -198630844(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 28(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal 1126891415(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 56(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1416354905(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 20(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -57434055(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 48(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal 1700485571(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 12(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -1894986606(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 40(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1051523(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 4(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -2054922799(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 32(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal 1873313359(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 60(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -30611744(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 24(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1560198380(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 52(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl 
%edx,%ecx leal 1309151649(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 16(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal -145523070(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 44(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -1120210379(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 8(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal 718787259(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 36(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -343485551(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 0(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx addl %r8d,%eax addl %r9d,%ebx addl %r14d,%ecx addl %r15d,%edx addq $64,%rsi cmpq %rdi,%rsi jb L$loop L$end: movl %eax,0(%rbp) movl %ebx,4(%rbp) movl %ecx,8(%rbp) movl %edx,12(%rbp) movq (%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r12 movq 24(%rsp),%rbx movq 32(%rsp),%rbp addq $40,%rsp L$epilogue: .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
120,346
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/aesni-xts-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl _aes_hw_xts_encrypt_avx512 .private_extern _aes_hw_xts_encrypt_avx512 .private_extern _aes_hw_xts_encrypt_avx512 .p2align 5 _aes_hw_xts_encrypt_avx512: .byte 243,15,30,250 pushq %rbp movq %rsp,%rbp subq $376,%rsp andq $0xffffffffffffffc0,%rsp movq %rbx,368(%rsp) movq $0x87,%r10 vmovdqu (%r9),%xmm1 vpxor %xmm4,%xmm4,%xmm4 vmovdqu (%r8),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqu (%rcx),%xmm2 vmovdqa %xmm2,128(%rsp) vmovdqu 16(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 16(%rcx),%xmm2 vmovdqa %xmm2,144(%rsp) vmovdqu 32(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 32(%rcx),%xmm2 vmovdqa %xmm2,160(%rsp) vmovdqu 48(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 48(%rcx),%xmm2 vmovdqa %xmm2,176(%rsp) vmovdqu 64(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 64(%rcx),%xmm2 vmovdqa %xmm2,192(%rsp) vmovdqu 80(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 80(%rcx),%xmm2 vmovdqa %xmm2,208(%rsp) vmovdqu 96(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 96(%rcx),%xmm2 vmovdqa %xmm2,224(%rsp) vmovdqu 112(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 112(%rcx),%xmm2 vmovdqa %xmm2,240(%rsp) vmovdqu 128(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 128(%rcx),%xmm2 vmovdqa %xmm2,256(%rsp) vmovdqu 144(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 144(%rcx),%xmm2 vmovdqa %xmm2,272(%rsp) vmovdqu 160(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 160(%rcx),%xmm2 vmovdqa %xmm2,288(%rsp) vmovdqu 176(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 176(%rcx),%xmm2 vmovdqa %xmm2,304(%rsp) vmovdqu 192(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 192(%rcx),%xmm2 vmovdqa %xmm2,320(%rsp) vmovdqu 208(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 208(%rcx),%xmm2 vmovdqa %xmm2,336(%rsp) vmovdqu 224(%r8),%xmm0 .byte 98,242,117,8,221,200 vmovdqu 224(%rcx),%xmm2 vmovdqa %xmm2,352(%rsp) vmovdqa %xmm1,(%rsp) cmpq $0x80,%rdx jl L$_less_than_128_bytes_hEgxyDlCngwrfFe vpbroadcastq %r10,%zmm25 cmpq $0x100,%rdx jge L$_start_by16_hEgxyDlCngwrfFe cmpq $0x80,%rdx jge L$_start_by8_hEgxyDlCngwrfFe L$_do_n_blocks_hEgxyDlCngwrfFe: cmpq $0x0,%rdx je L$_ret_hEgxyDlCngwrfFe cmpq $0x70,%rdx jge L$_remaining_num_blocks_is_7_hEgxyDlCngwrfFe cmpq $0x60,%rdx jge L$_remaining_num_blocks_is_6_hEgxyDlCngwrfFe cmpq $0x50,%rdx jge L$_remaining_num_blocks_is_5_hEgxyDlCngwrfFe cmpq $0x40,%rdx jge L$_remaining_num_blocks_is_4_hEgxyDlCngwrfFe cmpq $0x30,%rdx jge L$_remaining_num_blocks_is_3_hEgxyDlCngwrfFe cmpq $0x20,%rdx jge L$_remaining_num_blocks_is_2_hEgxyDlCngwrfFe cmpq $0x10,%rdx jge L$_remaining_num_blocks_is_1_hEgxyDlCngwrfFe vmovdqa %xmm0,%xmm8 vmovdqa %xmm9,%xmm0 jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_7_hEgxyDlCngwrfFe: movq $0xffffffffffffffff,%r8 shrq $0x10,%r8 kmovq %r8,%k1 vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2{%k1} addq $0x70,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 
98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} addq $0x70,%rsi vextracti32x4 $0x2,%zmm2,%xmm8 vextracti32x4 $0x3,%zmm10,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_6_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%ymm2 addq $0x60,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) addq $0x60,%rsi vextracti32x4 $0x1,%zmm2,%xmm8 vextracti32x4 $0x2,%zmm10,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_5_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu 64(%rdi),%xmm2 addq $0x50,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 
98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu %xmm2,64(%rsi) addq $0x50,%rsi movdqa %xmm2,%xmm8 vextracti32x4 $0x1,%zmm10,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_4_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 addq $0x40,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) addq $0x40,%rsi vextracti32x4 $0x3,%zmm1,%xmm8 vextracti32x4 $0x0,%zmm10,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_3_hEgxyDlCngwrfFe: vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x2,%zmm9,%xmm11 vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 
192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 vextracti32x4 $0x3,%zmm9,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_2_hEgxyDlCngwrfFe: vextracti32x4 $0x1,%zmm9,%xmm10 vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 vextracti32x4 $0x2,%zmm9,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_remaining_num_blocks_is_1_hEgxyDlCngwrfFe: vmovdqu (%rdi),%xmm1 addq $0x10,%rdi vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 
256(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 vextracti32x4 $0x1,%zmm9,%xmm0 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_hEgxyDlCngwrfFe L$_start_by16_hEgxyDlCngwrfFe: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm11 vpxord %zmm14,%zmm11,%zmm11 vpsrldq $0xf,%zmm10,%zmm15 .byte 98,131,5,72,68,193,0 vpslldq $0x1,%zmm10,%zmm12 vpxord %zmm16,%zmm12,%zmm12 L$_main_loop_run_16_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 128(%rdi),%zmm3 vmovdqu8 192(%rdi),%zmm4 addq $0x100,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpxorq %zmm0,%zmm3,%zmm3 vpxorq %zmm0,%zmm4,%zmm4 vpsrldq $0xf,%zmm11,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm11,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm12,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm12,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm15,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm15,%zmm17 vpxord %zmm14,%zmm17,%zmm17 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm16,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm16,%zmm18 vpxord %zmm14,%zmm18,%zmm18 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 
98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 .byte 98,242,101,72,221,216 .byte 98,242,93,72,221,224 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqa32 %zmm17,%zmm11 vmovdqa32 %zmm18,%zmm12 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) vmovdqu8 %zmm3,128(%rsi) vmovdqu8 %zmm4,192(%rsi) addq $0x100,%rsi subq $0x100,%rdx cmpq $0x100,%rdx jge L$_main_loop_run_16_hEgxyDlCngwrfFe cmpq $0x80,%rdx jge L$_main_loop_run_8_hEgxyDlCngwrfFe vextracti32x4 $0x3,%zmm4,%xmm0 jmp L$_do_n_blocks_hEgxyDlCngwrfFe L$_start_by8_hEgxyDlCngwrfFe: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 L$_main_loop_run_8_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 addq $0x80,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vpsrldq $0xf,%zmm10,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm10,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) addq $0x80,%rsi subq $0x80,%rdx cmpq $0x80,%rdx jge L$_main_loop_run_8_hEgxyDlCngwrfFe vextracti32x4 $0x3,%zmm2,%xmm0 jmp L$_do_n_blocks_hEgxyDlCngwrfFe L$_steal_cipher_next_hEgxyDlCngwrfFe: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq 
%r10,%r11 xorq %r11,%rax movq %rax,(%rsp) movq %rbx,8(%rsp) vmovdqa (%rsp),%xmm0 L$_steal_cipher_hEgxyDlCngwrfFe: vmovdqa %xmm8,%xmm2 leaq vpshufb_shf_table(%rip),%rax vmovdqu (%rax,%rdx,1),%xmm10 vpshufb %xmm10,%xmm8,%xmm8 vmovdqu -16(%rdi,%rdx,1),%xmm3 vmovdqu %xmm8,-16(%rsi,%rdx,1) leaq vpshufb_shf_table(%rip),%rax addq $16,%rax subq %rdx,%rax vmovdqu (%rax),%xmm10 vpxor mask1(%rip),%xmm10,%xmm10 vpshufb %xmm10,%xmm3,%xmm3 vpblendvb %xmm10,%xmm2,%xmm3,%xmm3 vpxor %xmm0,%xmm3,%xmm8 vpxor 128(%rsp),%xmm8,%xmm8 .byte 98,114,61,8,220,132,36,144,0,0,0 .byte 98,114,61,8,220,132,36,160,0,0,0 .byte 98,114,61,8,220,132,36,176,0,0,0 .byte 98,114,61,8,220,132,36,192,0,0,0 .byte 98,114,61,8,220,132,36,208,0,0,0 .byte 98,114,61,8,220,132,36,224,0,0,0 .byte 98,114,61,8,220,132,36,240,0,0,0 .byte 98,114,61,8,220,132,36,0,1,0,0 .byte 98,114,61,8,220,132,36,16,1,0,0 .byte 98,114,61,8,220,132,36,32,1,0,0 .byte 98,114,61,8,220,132,36,48,1,0,0 .byte 98,114,61,8,220,132,36,64,1,0,0 .byte 98,114,61,8,220,132,36,80,1,0,0 .byte 98,114,61,8,221,132,36,96,1,0,0 vpxor %xmm0,%xmm8,%xmm8 vmovdqu %xmm8,-16(%rsi) L$_ret_hEgxyDlCngwrfFe: movq 368(%rsp),%rbx xorq %r8,%r8 movq %r8,368(%rsp) vpxorq %zmm0,%zmm0,%zmm0 vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) movq $0x3f,%r8 kmovq %r8,%k2 vmovdqa64 %zmm0,320(%rsp){%k2} movq %rbp,%rsp popq %rbp vzeroupper .byte 0xf3,0xc3 L$_less_than_128_bytes_hEgxyDlCngwrfFe: cmpq $0x10,%rdx jb L$_ret_hEgxyDlCngwrfFe movq %rdx,%r8 andq $0x70,%r8 cmpq $0x60,%r8 je L$_num_blocks_is_6_hEgxyDlCngwrfFe cmpq $0x50,%r8 je L$_num_blocks_is_5_hEgxyDlCngwrfFe cmpq $0x40,%r8 je L$_num_blocks_is_4_hEgxyDlCngwrfFe cmpq $0x30,%r8 je L$_num_blocks_is_3_hEgxyDlCngwrfFe cmpq $0x20,%r8 je L$_num_blocks_is_2_hEgxyDlCngwrfFe cmpq $0x10,%r8 je L$_num_blocks_is_1_hEgxyDlCngwrfFe L$_num_blocks_is_7_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,96(%rsp) movq %rbx,104(%rsp) vmovdqa 96(%rsp),%xmm15 vmovdqu 96(%rdi),%xmm7 addq $0x70,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 
98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 .byte 98,242,77,8,221,240 .byte 98,242,69,8,221,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) addq $0x70,%rsi vmovdqa %xmm7,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_6_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq 
%r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 addq $0x60,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 .byte 98,242,77,8,221,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu %xmm1,(%rsi) vmovdqu 
%xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x60,%rsi vmovdqa %xmm6,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_5_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 addq $0x50,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor 
%xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x50,%rsi vmovdqa %xmm5,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_4_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 addq $0x40,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x40,%rsi vmovdqa %xmm4,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_3_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax 
adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_2_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 
vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe L$_num_blocks_is_1_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 addq $0x10,%rdi vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 andq $0xf,%rdx je L$_ret_hEgxyDlCngwrfFe jmp L$_steal_cipher_next_hEgxyDlCngwrfFe .globl _aes_hw_xts_decrypt_avx512 .private_extern _aes_hw_xts_decrypt_avx512 .private_extern _aes_hw_xts_decrypt_avx512 .p2align 5 _aes_hw_xts_decrypt_avx512: .byte 243,15,30,250 pushq %rbp movq %rsp,%rbp subq $376,%rsp andq $0xffffffffffffffc0,%rsp movq %rbx,368(%rsp) movq $0x87,%r10 vmovdqu (%r9),%xmm1 vpxor %xmm4,%xmm4,%xmm4 vmovdqu (%r8),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqu 224(%rcx),%xmm2 vmovdqa %xmm2,352(%rsp) vmovdqu 16(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 208(%rcx),%xmm2 vmovdqa %xmm2,336(%rsp) vmovdqu 32(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 192(%rcx),%xmm2 vmovdqa %xmm2,320(%rsp) vmovdqu 48(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 176(%rcx),%xmm2 vmovdqa %xmm2,304(%rsp) vmovdqu 64(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 160(%rcx),%xmm2 vmovdqa %xmm2,288(%rsp) vmovdqu 80(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 144(%rcx),%xmm2 vmovdqa %xmm2,272(%rsp) vmovdqu 96(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 128(%rcx),%xmm2 vmovdqa %xmm2,256(%rsp) vmovdqu 112(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 112(%rcx),%xmm2 vmovdqa %xmm2,240(%rsp) vmovdqu 128(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 96(%rcx),%xmm2 vmovdqa %xmm2,224(%rsp) vmovdqu 144(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 80(%rcx),%xmm2 vmovdqa %xmm2,208(%rsp) vmovdqu 160(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 64(%rcx),%xmm2 vmovdqa %xmm2,192(%rsp) vmovdqu 176(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 48(%rcx),%xmm2 vmovdqa %xmm2,176(%rsp) vmovdqu 192(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 32(%rcx),%xmm2 vmovdqa %xmm2,160(%rsp) vmovdqu 208(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 16(%rcx),%xmm2 vmovdqa %xmm2,144(%rsp) vmovdqu 224(%r8),%xmm0 .byte 98,242,117,8,221,200 vmovdqu (%rcx),%xmm2 vmovdqa %xmm2,128(%rsp) vmovdqa %xmm1,(%rsp) cmpq $0x80,%rdx jb L$_less_than_128_bytes_amivrujEyduiFoi vpbroadcastq %r10,%zmm25 cmpq $0x100,%rdx jge L$_start_by16_amivrujEyduiFoi jmp L$_start_by8_amivrujEyduiFoi L$_do_n_blocks_amivrujEyduiFoi: cmpq $0x0,%rdx je L$_ret_amivrujEyduiFoi cmpq $0x70,%rdx jge L$_remaining_num_blocks_is_7_amivrujEyduiFoi cmpq $0x60,%rdx jge L$_remaining_num_blocks_is_6_amivrujEyduiFoi cmpq $0x50,%rdx jge L$_remaining_num_blocks_is_5_amivrujEyduiFoi cmpq $0x40,%rdx jge 
L$_remaining_num_blocks_is_4_amivrujEyduiFoi cmpq $0x30,%rdx jge L$_remaining_num_blocks_is_3_amivrujEyduiFoi cmpq $0x20,%rdx jge L$_remaining_num_blocks_is_2_amivrujEyduiFoi cmpq $0x10,%rdx jge L$_remaining_num_blocks_is_1_amivrujEyduiFoi vmovdqu %xmm5,%xmm1 vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,-16(%rsi) vmovdqa %xmm1,%xmm8 movq $0x1,%r8 kmovq %r8,%k1 vpsllq $0x3f,%xmm9,%xmm13 vpsraq $0x3f,%xmm13,%xmm14 vpandq %xmm25,%xmm14,%xmm5 vpxorq %xmm5,%xmm9,%xmm9{%k1} vpsrldq $0x8,%xmm9,%xmm10 .byte 98, 211, 181, 8, 115, 194, 1 vpslldq $0x8,%xmm13,%xmm13 vpxorq %xmm13,%xmm0,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_remaining_num_blocks_is_7_amivrujEyduiFoi: movq $0xffffffffffffffff,%r8 shrq $0x10,%r8 kmovq %r8,%k1 vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2{%k1} addq $0x70,%rdi andq $0xf,%rdx je L$_done_7_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm10,%xmm12 vextracti32x4 $0x3,%zmm10,%xmm13 vinserti32x4 $0x2,%xmm13,%zmm10,%zmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} addq $0x70,%rsi vextracti32x4 $0x2,%zmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_7_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 
98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_6_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%ymm2 addq $0x60,%rdi andq $0xf,%rdx je L$_done_6_remain_amivrujEyduiFoi vextracti32x4 $0x1,%zmm10,%xmm12 vextracti32x4 $0x2,%zmm10,%xmm13 vinserti32x4 $0x1,%xmm13,%zmm10,%zmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) addq $0x60,%rsi vextracti32x4 $0x1,%zmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_6_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 
98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_5_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu 64(%rdi),%xmm2 addq $0x50,%rdi andq $0xf,%rdx je L$_done_5_remain_amivrujEyduiFoi vmovdqa %xmm10,%xmm12 vextracti32x4 $0x1,%zmm10,%xmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu %xmm2,64(%rsi) addq $0x50,%rsi vmovdqa %xmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_5_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 
.byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %xmm2,64(%rsi) jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_4_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 addq $0x40,%rdi andq $0xf,%rdx je L$_done_4_remain_amivrujEyduiFoi vextracti32x4 $0x3,%zmm9,%xmm12 vinserti32x4 $0x3,%xmm10,%zmm9,%zmm9 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) addq $0x40,%rsi vextracti32x4 $0x3,%zmm1,%xmm8 vmovdqa %xmm12,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_4_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 
98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_3_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi andq $0xf,%rdx je L$_done_3_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm9,%xmm13 vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x3,%zmm9,%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 vmovdqa %xmm13,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_3_remain_amivrujEyduiFoi: vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x2,%zmm9,%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 
.byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_2_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi andq $0xf,%rdx je L$_done_2_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm9,%xmm10 vextracti32x4 $0x1,%zmm9,%xmm12 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_2_remain_amivrujEyduiFoi: vextracti32x4 $0x1,%zmm9,%xmm10 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 
98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) jmp L$_ret_amivrujEyduiFoi L$_remaining_num_blocks_is_1_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 addq $0x10,%rdi andq $0xf,%rdx je L$_done_1_remain_amivrujEyduiFoi vextracti32x4 $0x1,%zmm9,%xmm11 vpxor %xmm11,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm11,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 vmovdqa %xmm9,%xmm0 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_1_remain_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) jmp L$_ret_amivrujEyduiFoi L$_start_by16_amivrujEyduiFoi: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm11 vpxord %zmm14,%zmm11,%zmm11 vpsrldq $0xf,%zmm10,%zmm15 .byte 98,131,5,72,68,193,0 vpslldq $0x1,%zmm10,%zmm12 vpxord %zmm16,%zmm12,%zmm12 
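# Decrypt main loop (16 blocks / 256 bytes per iteration): ciphertext is held in
# zmm1-zmm4 with the corresponding XTS tweaks in zmm9-zmm12. The .byte
# 98,242,...,222,... and ...,223,... sequences appear to be manually EVEX-encoded
# vaesdec/vaesdeclast on ZMM registers (VAES), and the .byte ...,68,... sequences
# vpclmulqdq, used to derive the next four tweaks (zmm15-zmm18) while the AES
# rounds are in flight. The fifteen round keys were copied to 128(%rsp)..352(%rsp)
# by the function prologue and are re-broadcast per round with vbroadcasti32x4.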
L$_main_loop_run_16_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 128(%rdi),%zmm3 vmovdqu8 192(%rdi),%zmm4 vmovdqu8 240(%rdi),%zmm5 addq $0x100,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpxorq %zmm0,%zmm3,%zmm3 vpxorq %zmm0,%zmm4,%zmm4 vpsrldq $0xf,%zmm11,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm11,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm12,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm12,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm15,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm15,%zmm17 vpxord %zmm14,%zmm17,%zmm17 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm16,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm16,%zmm18 vpxord %zmm14,%zmm18,%zmm18 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 .byte 98,242,101,72,223,216 .byte 98,242,93,72,223,224 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqa32 %zmm17,%zmm11 vmovdqa32 %zmm18,%zmm12 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) vmovdqu8 %zmm3,128(%rsi) vmovdqu8 %zmm4,192(%rsi) addq $0x100,%rsi subq $0x100,%rdx cmpq $0x100,%rdx jge L$_main_loop_run_16_amivrujEyduiFoi cmpq $0x80,%rdx jge L$_main_loop_run_8_amivrujEyduiFoi jmp L$_do_n_blocks_amivrujEyduiFoi L$_start_by8_amivrujEyduiFoi: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 
.byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 L$_main_loop_run_8_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 112(%rdi),%xmm5 addq $0x80,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vpsrldq $0xf,%zmm10,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm10,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) addq $0x80,%rsi subq $0x80,%rdx cmpq $0x80,%rdx jge L$_main_loop_run_8_amivrujEyduiFoi jmp L$_do_n_blocks_amivrujEyduiFoi L$_steal_cipher_amivrujEyduiFoi: vmovdqa %xmm8,%xmm2 leaq vpshufb_shf_table(%rip),%rax vmovdqu (%rax,%rdx,1),%xmm10 vpshufb %xmm10,%xmm8,%xmm8 vmovdqu -16(%rdi,%rdx,1),%xmm3 vmovdqu %xmm8,-16(%rsi,%rdx,1) leaq vpshufb_shf_table(%rip),%rax addq $16,%rax subq %rdx,%rax vmovdqu (%rax),%xmm10 vpxor mask1(%rip),%xmm10,%xmm10 vpshufb %xmm10,%xmm3,%xmm3 vpblendvb %xmm10,%xmm2,%xmm3,%xmm3 vpxor %xmm0,%xmm3,%xmm8 vpxor 128(%rsp),%xmm8,%xmm8 .byte 98,114,61,8,222,132,36,144,0,0,0 .byte 98,114,61,8,222,132,36,160,0,0,0 .byte 98,114,61,8,222,132,36,176,0,0,0 .byte 98,114,61,8,222,132,36,192,0,0,0 .byte 98,114,61,8,222,132,36,208,0,0,0 .byte 98,114,61,8,222,132,36,224,0,0,0 .byte 98,114,61,8,222,132,36,240,0,0,0 .byte 98,114,61,8,222,132,36,0,1,0,0 .byte 98,114,61,8,222,132,36,16,1,0,0 .byte 98,114,61,8,222,132,36,32,1,0,0 .byte 98,114,61,8,222,132,36,48,1,0,0 .byte 98,114,61,8,222,132,36,64,1,0,0 .byte 98,114,61,8,222,132,36,80,1,0,0 .byte 98,114,61,8,223,132,36,96,1,0,0 vpxor %xmm0,%xmm8,%xmm8 L$_done_amivrujEyduiFoi: vmovdqu %xmm8,-16(%rsi) L$_ret_amivrujEyduiFoi: movq 368(%rsp),%rbx xorq %r8,%r8 movq %r8,368(%rsp) vpxorq %zmm0,%zmm0,%zmm0 vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) movq $0x3f,%r8 kmovq %r8,%k2 vmovdqa64 %zmm0,320(%rsp){%k2} movq %rbp,%rsp popq %rbp vzeroupper .byte 0xf3,0xc3 
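# Tail path for inputs shorter than 128 bytes: dispatch on the number of whole
# 16-byte blocks (1..7). Each branch steps the XTS tweak forward with the
# shlq/adcq/cmovcq/xorq sequence, i.e. a multiply by x in GF(2^128) using the
# reduction constant 0x87 kept in %r10, runs the AES-256 decryption rounds from
# the stacked key schedule, and branches to the ciphertext-stealing handler when
# the length is not a multiple of 16 bytes.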
L$_less_than_128_bytes_amivrujEyduiFoi: cmpq $0x10,%rdx jb L$_ret_amivrujEyduiFoi movq %rdx,%r8 andq $0x70,%r8 cmpq $0x60,%r8 je L$_num_blocks_is_6_amivrujEyduiFoi cmpq $0x50,%r8 je L$_num_blocks_is_5_amivrujEyduiFoi cmpq $0x40,%r8 je L$_num_blocks_is_4_amivrujEyduiFoi cmpq $0x30,%r8 je L$_num_blocks_is_3_amivrujEyduiFoi cmpq $0x20,%r8 je L$_num_blocks_is_2_amivrujEyduiFoi cmpq $0x10,%r8 je L$_num_blocks_is_1_amivrujEyduiFoi L$_num_blocks_is_7_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,96(%rsp) movq %rbx,104(%rsp) vmovdqa 96(%rsp),%xmm15 vmovdqu 96(%rdi),%xmm7 addq $0x70,%rdi andq $0xf,%rdx je L$_done_7_amivrujEyduiFoi L$_steal_cipher_7_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm15,%xmm16 vmovdqa 16(%rsp),%xmm15 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 
98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 .byte 98,242,69,8,223,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x70,%rsi vmovdqa64 %xmm16,%xmm0 vmovdqa %xmm7,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_7_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 
98,242,69,8,222,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 .byte 98,242,69,8,223,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x70,%rsi vmovdqa %xmm7,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_6_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 addq $0x60,%rdi andq $0xf,%rdx je L$_done_6_amivrujEyduiFoi L$_steal_cipher_6_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm14,%xmm15 vmovdqa 16(%rsp),%xmm14 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 
vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x60,%rsi vmovdqa %xmm15,%xmm0 vmovdqa %xmm6,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_6_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 
98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x60,%rsi vmovdqa %xmm6,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_5_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 addq $0x50,%rdi andq $0xf,%rdx je L$_done_5_amivrujEyduiFoi L$_steal_cipher_5_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq 
%rbx,24(%rsp) vmovdqa64 %xmm13,%xmm14 vmovdqa 16(%rsp),%xmm13 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x50,%rsi vmovdqa %xmm14,%xmm0 vmovdqa %xmm5,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_5_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 
98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x50,%rsi vmovdqa %xmm5,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_4_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 addq $0x40,%rdi andq $0xf,%rdx je L$_done_4_amivrujEyduiFoi L$_steal_cipher_4_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm12,%xmm13 vmovdqa 16(%rsp),%xmm12 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 192(%rsp),%xmm0 .byte 
98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x40,%rsi vmovdqa %xmm13,%xmm0 vmovdqa %xmm4,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_4_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x40,%rsi vmovdqa %xmm4,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_3_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi andq $0xf,%rdx je L$_done_3_amivrujEyduiFoi L$_steal_cipher_3_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm11,%xmm12 vmovdqa 16(%rsp),%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x30,%rsi vmovdqa %xmm12,%xmm0 vmovdqa %xmm3,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_3_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_2_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi andq $0xf,%rdx je L$_done_2_amivrujEyduiFoi L$_steal_cipher_2_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm10,%xmm11 vmovdqa 16(%rsp),%xmm10 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) addq $0x20,%rsi vmovdqa %xmm11,%xmm0 vmovdqa %xmm2,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_2_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 jmp L$_done_amivrujEyduiFoi L$_num_blocks_is_1_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 addq $0x10,%rdi andq $0xf,%rdx je L$_done_1_amivrujEyduiFoi L$_steal_cipher_1_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm9,%xmm10 vmovdqa 16(%rsp),%xmm9 vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 addq $0x10,%rsi vmovdqa %xmm10,%xmm0 vmovdqa %xmm1,%xmm8 jmp L$_steal_cipher_amivrujEyduiFoi L$_done_1_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 addq $0x10,%rsi vmovdqa %xmm1,%xmm8 jmp L$_done_amivrujEyduiFoi .section __DATA,__const .p2align 4 vpshufb_shf_table: .quad 0x8786858483828100, 0x8f8e8d8c8b8a8988 .quad 0x0706050403020100, 0x000e0d0c0b0a0908 mask1: .quad 0x8080808080808080, 0x8080808080808080 const_dq3210: .quad 0, 0, 1, 1, 2, 2, 3, 3 const_dq5678: .quad 8, 8, 7, 7, 6, 6, 5, 5 const_dq7654: .quad 4, 4, 5, 5, 6, 6, 7, 7 const_dq1234: .quad 4, 4, 3, 3, 2, 2, 1, 1 
shufb_15_7: .byte 15, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 7, 0xff, 0xff .byte 0xff, 0xff, 0xff, 0xff, 0xff .text #endif #endif
marvin-hansen/iggy-streaming-system
55,884
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/x86_64-mont5.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _bn_mul_mont_gather5 .private_extern _bn_mul_mont_gather5 .p2align 6 _bn_mul_mont_gather5: _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax testl $7,%r9d jnz L$mul_enter #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%r11 movl 8(%r11),%r11d #endif jmp L$mul4x_enter .p2align 4 L$mul_enter: movd 8(%rsp),%xmm5 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 negq %r9 movq %rsp,%r11 leaq -280(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja L$mul_page_walk jmp L$mul_page_walk_done L$mul_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja L$mul_page_walk L$mul_page_walk_done: leaq L$inc(%rip),%r10 movq %rax,8(%rsp,%r9,8) L$mul_body: leaq 128(%rdx),%r12 movdqa 0(%r10),%xmm0 movdqa 16(%r10),%xmm1 leaq 24-112(%rsp,%r9,8),%r10 andq $-16,%r10 pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 .byte 0x67 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 .byte 0x67 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%r12),%xmm0 pand 80(%r12),%xmm1 pand 96(%r12),%xmm2 movdqa %xmm3,352(%r10) pand 112(%r12),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%r12),%xmm4 movdqa -112(%r12),%xmm5 movdqa -96(%r12),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%r12),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%r12),%xmm4 movdqa -48(%r12),%xmm5 movdqa -32(%r12),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%r12),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%r12),%xmm4 movdqa 16(%r12),%xmm5 movdqa 32(%r12),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%r12),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 por %xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq (%r8),%r8 movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%r13 
leaq 1(%r15),%r15 jmp L$1st_enter .p2align 4 L$1st: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%r13 movq %r10,%r11 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 L$1st_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx leaq 1(%r15),%r15 movq %rdx,%r10 mulq %rbp cmpq %r9,%r15 jne L$1st addq %rax,%r13 adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-16(%rsp,%r9,8) movq %rdx,%r13 movq %r10,%r11 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 jmp L$outer .p2align 4 L$outer: leaq 24+128(%rsp,%r9,8),%rdx andq $-16,%rdx pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r12),%xmm0 movdqa -112(%r12),%xmm1 movdqa -96(%r12),%xmm2 movdqa -80(%r12),%xmm3 pand -128(%rdx),%xmm0 pand -112(%rdx),%xmm1 por %xmm0,%xmm4 pand -96(%rdx),%xmm2 por %xmm1,%xmm5 pand -80(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r12),%xmm0 movdqa -48(%r12),%xmm1 movdqa -32(%r12),%xmm2 movdqa -16(%r12),%xmm3 pand -64(%rdx),%xmm0 pand -48(%rdx),%xmm1 por %xmm0,%xmm4 pand -32(%rdx),%xmm2 por %xmm1,%xmm5 pand -16(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r12),%xmm0 movdqa 16(%r12),%xmm1 movdqa 32(%r12),%xmm2 movdqa 48(%r12),%xmm3 pand 0(%rdx),%xmm0 pand 16(%rdx),%xmm1 por %xmm0,%xmm4 pand 32(%rdx),%xmm2 por %xmm1,%xmm5 pand 48(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%r12),%xmm0 movdqa 80(%r12),%xmm1 movdqa 96(%r12),%xmm2 movdqa 112(%r12),%xmm3 pand 64(%rdx),%xmm0 pand 80(%rdx),%xmm1 por %xmm0,%xmm4 pand 96(%rdx),%xmm2 por %xmm1,%xmm5 pand 112(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%r12),%r12 movq (%rsi),%rax .byte 102,72,15,126,195 xorq %r15,%r15 movq %r8,%rbp movq (%rsp),%r10 mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq 8(%rsp),%r10 movq %rdx,%r13 leaq 1(%r15),%r15 jmp L$inner_enter .p2align 4 L$inner: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 L$inner_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 leaq 1(%r15),%r15 mulq %rbp cmpq %r9,%r15 jne L$inner addq %rax,%r13 adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r9,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r9,8) movq %rdx,%r13 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 cmpq %r9,%r14 jb L$outer xorq %r14,%r14 movq (%rsp),%rax leaq (%rsp),%rsi movq %r9,%r15 jmp L$sub .p2align 4 L$sub: sbbq (%rcx,%r14,8),%rax movq %rax,(%rdi,%r14,8) movq 8(%rsi,%r14,8),%rax leaq 1(%r14),%r14 decq %r15 jnz L$sub sbbq $0,%rax movq $-1,%rbx xorq %rax,%rbx xorq %r14,%r14 movq %r9,%r15 L$copy: movq (%rdi,%r14,8),%rcx movq (%rsp,%r14,8),%rdx andq %rbx,%rcx andq %rax,%rdx movq %r14,(%rsp,%r14,8) orq %rcx,%rdx movq %rdx,(%rdi,%r14,8) leaq 1(%r14),%r14 subq $1,%r15 jnz L$copy movq 8(%rsp,%r9,8),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mul_epilogue: .byte 0xf3,0xc3 .p2align 5 bn_mul4x_mont_gather5: .byte 0x67 movq %rsp,%rax L$mul4x_enter: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX andl $0x80108,%r11d cmpl $0x80108,%r11d je L$mulx4x_enter #endif pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$mul4x_prologue: .byte 0x67 
shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb L$mul4xsp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp L$mul4xsp_done .p2align 5 L$mul4xsp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp L$mul4xsp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mul4x_page_walk jmp L$mul4x_page_walk_done L$mul4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mul4x_page_walk L$mul4x_page_walk_done: negq %r9 movq %rax,40(%rsp) L$mul4x_body: call mul4x_internal movq 40(%rsp),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mul4x_epilogue: .byte 0xf3,0xc3 .p2align 5 mul4x_internal: shlq $5,%r9 movd 8(%rax),%xmm5 leaq L$inc(%rip),%rax leaq 128(%rdx,%r9,1),%r13 shrq $5,%r9 movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 88-112(%rsp,%r9,1),%r10 leaq 128(%rdx),%r12 pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 .byte 0x67,0x67 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 .byte 0x67 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 .byte 0x67 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%r12),%xmm0 pand 80(%r12),%xmm1 pand 96(%r12),%xmm2 movdqa %xmm3,352(%r10) pand 112(%r12),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%r12),%xmm4 movdqa -112(%r12),%xmm5 movdqa -96(%r12),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%r12),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%r12),%xmm4 movdqa -48(%r12),%xmm5 movdqa -32(%r12),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%r12),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%r12),%xmm4 movdqa 16(%r12),%xmm5 movdqa 32(%r12),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%r12),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 por %xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq %r13,16+8(%rsp) movq %rdi,56+8(%rsp) movq (%r8),%r8 movq (%rsi),%rax leaq (%rsi,%r9,1),%rsi negq %r9 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp leaq 
64+8(%rsp),%r14 movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi,%r9,1),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%r9),%r15 leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdi,(%r14) movq %rdx,%r13 jmp L$1st4x .p2align 5 L$1st4x: mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq 0(%rcx),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdi,(%r14) movq %rdx,%r13 addq $32,%r15 jnz L$1st4x mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%r13 leaq (%rcx,%r9,1),%rcx xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi movq %r13,-8(%r14) jmp L$outer4x .p2align 5 L$outer4x: leaq 16+128(%r14),%rdx pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r12),%xmm0 movdqa -112(%r12),%xmm1 movdqa -96(%r12),%xmm2 movdqa -80(%r12),%xmm3 pand -128(%rdx),%xmm0 pand -112(%rdx),%xmm1 por %xmm0,%xmm4 pand -96(%rdx),%xmm2 por %xmm1,%xmm5 pand -80(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r12),%xmm0 movdqa -48(%r12),%xmm1 movdqa -32(%r12),%xmm2 movdqa -16(%r12),%xmm3 pand -64(%rdx),%xmm0 pand -48(%rdx),%xmm1 por %xmm0,%xmm4 pand -32(%rdx),%xmm2 por %xmm1,%xmm5 pand -16(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r12),%xmm0 movdqa 16(%r12),%xmm1 movdqa 32(%r12),%xmm2 movdqa 48(%r12),%xmm3 pand 0(%rdx),%xmm0 pand 16(%rdx),%xmm1 por %xmm0,%xmm4 pand 32(%rdx),%xmm2 por %xmm1,%xmm5 pand 48(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%r12),%xmm0 movdqa 80(%r12),%xmm1 movdqa 96(%r12),%xmm2 movdqa 112(%r12),%xmm3 pand 64(%rdx),%xmm0 pand 80(%rdx),%xmm1 por %xmm0,%xmm4 pand 96(%rdx),%xmm2 por %xmm1,%xmm5 pand 112(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq (%r14,%r9,1),%r10 movq %r8,%rbp mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 movq %rdi,(%r14) leaq (%r14,%r9,1),%r14 mulq %rbp addq %rax,%r10 movq 8(%rsi,%r9,1),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%r9),%r15 leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdx,%r13 jmp L$inner4x .p2align 5 L$inner4x: mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax adcq $0,%rdx addq 16(%r14),%r10 leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,1),%rax 
adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-32(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx addq -8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq 0(%rcx),%rax adcq $0,%rdx addq (%r14),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%rcx),%rcx adcq $0,%rdx movq %r13,-8(%r14) movq %rdx,%r13 addq $32,%r15 jnz L$inner4x mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax adcq $0,%rdx addq 16(%r14),%r10 leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-32(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq %rbp,%rax movq -8(%rcx),%rbp adcq $0,%rdx addq -8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%r13 movq %rdi,-16(%r14) leaq (%rcx,%r9,1),%rcx xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi addq (%r14),%r13 adcq $0,%rdi movq %r13,-8(%r14) cmpq 16+8(%rsp),%r12 jb L$outer4x xorq %rax,%rax subq %r13,%rbp adcq %r15,%r15 orq %r15,%rdi subq %rdi,%rax leaq (%r14,%r9,1),%rbx movq (%rcx),%r12 leaq (%rcx),%rbp movq %r9,%rcx sarq $3+2,%rcx movq 56+8(%rsp),%rdi decq %r12 xorq %r10,%r10 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp L$sqr4x_sub_entry .globl _bn_power5 .private_extern _bn_power5 .p2align 5 _bn_power5: _CET_ENDBR movq %rsp,%rax #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%r11 movl 8(%r11),%r11d andl $0x80108,%r11d cmpl $0x80108,%r11d je L$powerx5_enter #endif pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$power5_prologue: shll $3,%r9d leal (%r9,%r9,2),%r10d negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb L$pwr_sp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp L$pwr_sp_done .p2align 5 L$pwr_sp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp L$pwr_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$pwr_page_walk jmp L$pwr_page_walk_done L$pwr_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$pwr_page_walk L$pwr_page_walk_done: movq %r9,%r10 negq %r9 movq %r8,32(%rsp) movq %rax,40(%rsp) L$power5_body: .byte 102,72,15,110,207 .byte 102,72,15,110,209 .byte 102,73,15,110,218 .byte 102,72,15,110,226 call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal .byte 102,72,15,126,209 .byte 102,72,15,126,226 movq %rsi,%rdi movq 40(%rsp),%rax leaq 32(%rsp),%r8 call mul4x_internal movq 40(%rsp),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$power5_epilogue: .byte 0xf3,0xc3 .globl _bn_sqr8x_internal .private_extern _bn_sqr8x_internal 
.private_extern _bn_sqr8x_internal .p2align 5 _bn_sqr8x_internal: __bn_sqr8x_internal: _CET_ENDBR leaq 32(%r10),%rbp leaq (%rsi,%r9,1),%rsi movq %r9,%rcx movq -32(%rsi,%rbp,1),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi,%rbp,1),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi,%rbp,1),%rbx movq %rax,%r15 mulq %r14 movq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 movq %r10,-24(%rdi,%rbp,1) mulq %r14 addq %rax,%r11 movq %rbx,%rax adcq $0,%rdx movq %r11,-16(%rdi,%rbp,1) movq %rdx,%r10 movq -8(%rsi,%rbp,1),%rbx mulq %r15 movq %rax,%r12 movq %rbx,%rax movq %rdx,%r13 leaq (%rbp),%rcx mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) jmp L$sqr4x_1st .p2align 5 L$sqr4x_1st: movq (%rsi,%rcx,1),%rbx mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %rdx,%r12 adcq $0,%r12 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 8(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,(%rdi,%rcx,1) movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq 16(%rsi,%rcx,1),%rbx movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %r10,8(%rdi,%rcx,1) movq %rdx,%r12 adcq $0,%r12 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 24(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,16(%rdi,%rcx,1) movq %rdx,%r13 adcq $0,%r13 leaq 32(%rcx),%rcx mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) cmpq $0,%rcx jne L$sqr4x_1st mulq %r15 addq %rax,%r13 leaq 16(%rbp),%rbp adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) jmp L$sqr4x_outer .p2align 5 L$sqr4x_outer: movq -32(%rsi,%rbp,1),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi,%rbp,1),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi,%rbp,1),%rbx movq %rax,%r15 mulq %r14 movq -24(%rdi,%rbp,1),%r10 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx movq %r10,-24(%rdi,%rbp,1) movq %rdx,%r11 mulq %r14 addq %rax,%r11 movq %rbx,%rax adcq $0,%rdx addq -16(%rdi,%rbp,1),%r11 movq %rdx,%r10 adcq $0,%r10 movq %r11,-16(%rdi,%rbp,1) xorq %r12,%r12 movq -8(%rsi,%rbp,1),%rbx mulq %r15 addq %rax,%r12 movq %rbx,%rax adcq $0,%rdx addq -8(%rdi,%rbp,1),%r12 movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx addq %r12,%r10 movq %rdx,%r11 adcq $0,%r11 movq %r10,-8(%rdi,%rbp,1) leaq (%rbp),%rcx jmp L$sqr4x_inner .p2align 5 L$sqr4x_inner: movq (%rsi,%rcx,1),%rbx mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %rdx,%r12 adcq $0,%r12 addq (%rdi,%rcx,1),%r13 adcq $0,%r12 .byte 0x67 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 8(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %r11,(%rdi,%rcx,1) movq %rbx,%rax movq %rdx,%r13 adcq $0,%r13 addq 8(%rdi,%rcx,1),%r12 leaq 16(%rcx),%rcx adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx addq %r12,%r10 movq %rdx,%r11 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) cmpq $0,%rcx jne L$sqr4x_inner .byte 0x67 mulq %r15 addq %rax,%r13 adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) addq $16,%rbp jnz L$sqr4x_outer movq -32(%rsi),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi),%rbx movq %rax,%r15 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq %r10,-24(%rdi) movq 
%rdx,%r10 adcq $0,%r10 addq %r13,%r11 movq -8(%rsi),%rbx adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,-16(%rdi) movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi) mulq %r15 addq %rax,%r13 movq -16(%rsi),%rax adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) mulq %rbx addq $16,%rbp xorq %r14,%r14 subq %r9,%rbp xorq %r15,%r15 addq %r12,%rax adcq $0,%rdx movq %rax,8(%rdi) movq %rdx,16(%rdi) movq %r15,24(%rdi) movq -16(%rsi,%rbp,1),%rax leaq 48+8(%rsp),%rdi xorq %r10,%r10 movq 8(%rdi),%r11 leaq (%r14,%r10,2),%r12 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq 16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 24(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi,%rbp,1),%rax movq %r12,(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 32(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 40(%rdi),%r11 adcq %rax,%rbx movq 0(%rsi,%rbp,1),%rax movq %rbx,16(%rdi) adcq %rdx,%r8 leaq 16(%rbp),%rbp movq %r8,24(%rdi) sbbq %r15,%r15 leaq 64(%rdi),%rdi jmp L$sqr4x_shift_n_add .p2align 5 L$sqr4x_shift_n_add: leaq (%r14,%r10,2),%r12 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq -16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq -8(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi,%rbp,1),%rax movq %r12,-32(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,-24(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 0(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 8(%rdi),%r11 adcq %rax,%rbx movq 0(%rsi,%rbp,1),%rax movq %rbx,-16(%rdi) adcq %rdx,%r8 leaq (%r14,%r10,2),%r12 movq %r8,-8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq 16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 24(%rdi),%r11 adcq %rax,%r12 movq 8(%rsi,%rbp,1),%rax movq %r12,0(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 32(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 40(%rdi),%r11 adcq %rax,%rbx movq 16(%rsi,%rbp,1),%rax movq %rbx,16(%rdi) adcq %rdx,%r8 movq %r8,24(%rdi) sbbq %r15,%r15 leaq 64(%rdi),%rdi addq $32,%rbp jnz L$sqr4x_shift_n_add leaq (%r14,%r10,2),%r12 .byte 0x67 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq -16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq -8(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi),%rax movq %r12,-32(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,-24(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 mulq %rax negq %r15 adcq %rax,%rbx adcq %rdx,%r8 movq %rbx,-16(%rdi) movq %r8,-8(%rdi) .byte 102,72,15,126,213 __bn_sqr8x_reduction: xorq %rax,%rax leaq (%r9,%rbp,1),%rcx leaq 48+8(%rsp,%r9,2),%rdx movq %rcx,0+8(%rsp) leaq 48+8(%rsp,%r9,1),%rdi movq %rdx,8+8(%rsp) negq %r9 jmp L$8x_reduction_loop .p2align 5 L$8x_reduction_loop: leaq (%rdi,%r9,1),%rdi .byte 0x66 movq 0(%rdi),%rbx movq 8(%rdi),%r9 movq 16(%rdi),%r10 movq 24(%rdi),%r11 movq 32(%rdi),%r12 movq 40(%rdi),%r13 movq 48(%rdi),%r14 movq 56(%rdi),%r15 movq %rax,(%rdx) leaq 64(%rdi),%rdi .byte 0x67 movq %rbx,%r8 imulq 32+8(%rsp),%rbx movq 0(%rbp),%rax movl $8,%ecx jmp L$8x_reduce .p2align 5 L$8x_reduce: mulq %rbx movq 8(%rbp),%rax negq %r8 movq %rdx,%r8 adcq $0,%r8 mulq %rbx addq %rax,%r9 movq 16(%rbp),%rax adcq 
$0,%rdx addq %r9,%r8 movq %rbx,48-8+8(%rsp,%rcx,8) movq %rdx,%r9 adcq $0,%r9 mulq %rbx addq %rax,%r10 movq 24(%rbp),%rax adcq $0,%rdx addq %r10,%r9 movq 32+8(%rsp),%rsi movq %rdx,%r10 adcq $0,%r10 mulq %rbx addq %rax,%r11 movq 32(%rbp),%rax adcq $0,%rdx imulq %r8,%rsi addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 mulq %rbx addq %rax,%r12 movq 40(%rbp),%rax adcq $0,%rdx addq %r12,%r11 movq %rdx,%r12 adcq $0,%r12 mulq %rbx addq %rax,%r13 movq 48(%rbp),%rax adcq $0,%rdx addq %r13,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %rbx addq %rax,%r14 movq 56(%rbp),%rax adcq $0,%rdx addq %r14,%r13 movq %rdx,%r14 adcq $0,%r14 mulq %rbx movq %rsi,%rbx addq %rax,%r15 movq 0(%rbp),%rax adcq $0,%rdx addq %r15,%r14 movq %rdx,%r15 adcq $0,%r15 decl %ecx jnz L$8x_reduce leaq 64(%rbp),%rbp xorq %rax,%rax movq 8+8(%rsp),%rdx cmpq 0+8(%rsp),%rbp jae L$8x_no_tail .byte 0x66 addq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 sbbq %rsi,%rsi movq 48+56+8(%rsp),%rbx movl $8,%ecx movq 0(%rbp),%rax jmp L$8x_tail .p2align 5 L$8x_tail: mulq %rbx addq %rax,%r8 movq 8(%rbp),%rax movq %r8,(%rdi) movq %rdx,%r8 adcq $0,%r8 mulq %rbx addq %rax,%r9 movq 16(%rbp),%rax adcq $0,%rdx addq %r9,%r8 leaq 8(%rdi),%rdi movq %rdx,%r9 adcq $0,%r9 mulq %rbx addq %rax,%r10 movq 24(%rbp),%rax adcq $0,%rdx addq %r10,%r9 movq %rdx,%r10 adcq $0,%r10 mulq %rbx addq %rax,%r11 movq 32(%rbp),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 mulq %rbx addq %rax,%r12 movq 40(%rbp),%rax adcq $0,%rdx addq %r12,%r11 movq %rdx,%r12 adcq $0,%r12 mulq %rbx addq %rax,%r13 movq 48(%rbp),%rax adcq $0,%rdx addq %r13,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %rbx addq %rax,%r14 movq 56(%rbp),%rax adcq $0,%rdx addq %r14,%r13 movq %rdx,%r14 adcq $0,%r14 mulq %rbx movq 48-16+8(%rsp,%rcx,8),%rbx addq %rax,%r15 adcq $0,%rdx addq %r15,%r14 movq 0(%rbp),%rax movq %rdx,%r15 adcq $0,%r15 decl %ecx jnz L$8x_tail leaq 64(%rbp),%rbp movq 8+8(%rsp),%rdx cmpq 0+8(%rsp),%rbp jae L$8x_tail_done movq 48+56+8(%rsp),%rbx negq %rsi movq 0(%rbp),%rax adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 sbbq %rsi,%rsi movl $8,%ecx jmp L$8x_tail .p2align 5 L$8x_tail_done: xorq %rax,%rax addq (%rdx),%r8 adcq $0,%r9 adcq $0,%r10 adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 adcq $0,%rax negq %rsi L$8x_no_tail: adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 adcq $0,%rax movq -8(%rbp),%rcx xorq %rsi,%rsi .byte 102,72,15,126,213 movq %r8,0(%rdi) movq %r9,8(%rdi) .byte 102,73,15,126,217 movq %r10,16(%rdi) movq %r11,24(%rdi) movq %r12,32(%rdi) movq %r13,40(%rdi) movq %r14,48(%rdi) movq %r15,56(%rdi) leaq 64(%rdi),%rdi cmpq %rdx,%rdi jb L$8x_reduction_loop .byte 0xf3,0xc3 .p2align 5 __bn_post4x_internal: movq 0(%rbp),%r12 leaq (%rdi,%r9,1),%rbx movq %r9,%rcx .byte 102,72,15,126,207 negq %rax .byte 102,72,15,126,206 sarq $3+2,%rcx decq %r12 xorq %r10,%r10 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp L$sqr4x_sub_entry .p2align 4 L$sqr4x_sub: movq 0(%rbp),%r12 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 L$sqr4x_sub_entry: leaq 32(%rbp),%rbp notq %r12 notq %r13 notq %r14 notq %r15 andq %rax,%r12 andq %rax,%r13 andq %rax,%r14 andq %rax,%r15 negq %r10 adcq 0(%rbx),%r12 adcq 8(%rbx),%r13 adcq 16(%rbx),%r14 adcq 24(%rbx),%r15 movq %r12,0(%rdi) leaq 32(%rbx),%rbx movq 
%r13,8(%rdi) sbbq %r10,%r10 movq %r14,16(%rdi) movq %r15,24(%rdi) leaq 32(%rdi),%rdi incq %rcx jnz L$sqr4x_sub movq %r9,%r10 negq %r9 .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .p2align 5 bn_mulx4x_mont_gather5: movq %rsp,%rax L$mulx4x_enter: pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$mulx4x_prologue: shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb L$mulx4xsp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp L$mulx4xsp_done L$mulx4xsp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp L$mulx4xsp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mulx4x_page_walk jmp L$mulx4x_page_walk_done L$mulx4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mulx4x_page_walk L$mulx4x_page_walk_done: movq %r8,32(%rsp) movq %rax,40(%rsp) L$mulx4x_body: call mulx4x_internal movq 40(%rsp),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mulx4x_epilogue: .byte 0xf3,0xc3 .p2align 5 mulx4x_internal: movq %r9,8(%rsp) movq %r9,%r10 negq %r9 shlq $5,%r9 negq %r10 leaq 128(%rdx,%r9,1),%r13 shrq $5+5,%r9 movd 8(%rax),%xmm5 subq $1,%r9 leaq L$inc(%rip),%rax movq %r13,16+8(%rsp) movq %r9,24+8(%rsp) movq %rdi,56+8(%rsp) movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 88-112(%rsp,%r10,1),%r10 leaq 128(%rdx),%rdi pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 .byte 0x67 movdqa %xmm1,%xmm2 .byte 0x67 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 .byte 0x67 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%rdi),%xmm0 pand 80(%rdi),%xmm1 pand 96(%rdi),%xmm2 movdqa %xmm3,352(%r10) pand 112(%rdi),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%rdi),%xmm4 movdqa -112(%rdi),%xmm5 movdqa -96(%rdi),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%rdi),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%rdi),%xmm4 movdqa -48(%rdi),%xmm5 movdqa -32(%rdi),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%rdi),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%rdi),%xmm4 movdqa 
16(%rdi),%xmm5 movdqa 32(%rdi),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%rdi),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 pxor %xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%rdi),%rdi .byte 102,72,15,126,194 leaq 64+32+8(%rsp),%rbx movq %rdx,%r9 mulxq 0(%rsi),%r8,%rax mulxq 8(%rsi),%r11,%r12 addq %rax,%r11 mulxq 16(%rsi),%rax,%r13 adcq %rax,%r12 adcq $0,%r13 mulxq 24(%rsi),%rax,%r14 movq %r8,%r15 imulq 32+8(%rsp),%r8 xorq %rbp,%rbp movq %r8,%rdx movq %rdi,8+8(%rsp) leaq 32(%rsi),%rsi adcxq %rax,%r13 adcxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 movq 24+8(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) adcxq %rax,%r12 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r12,-16(%rbx) jmp L$mulx4x_1st .p2align 5 L$mulx4x_1st: adcxq %rbp,%r15 mulxq 0(%rsi),%r10,%rax adcxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 .byte 0x67,0x67 movq %r8,%rdx adcxq %rax,%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 movq %r11,-32(%rbx) adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz L$mulx4x_1st movq 8(%rsp),%rax adcq %rbp,%r15 leaq (%rsi,%rax,1),%rsi addq %r15,%r14 movq 8+8(%rsp),%rdi adcq %rbp,%rbp movq %r14,-8(%rbx) jmp L$mulx4x_outer .p2align 5 L$mulx4x_outer: leaq 16-256(%rbx),%r10 pxor %xmm4,%xmm4 .byte 0x67,0x67 pxor %xmm5,%xmm5 movdqa -128(%rdi),%xmm0 movdqa -112(%rdi),%xmm1 movdqa -96(%rdi),%xmm2 pand 256(%r10),%xmm0 movdqa -80(%rdi),%xmm3 pand 272(%r10),%xmm1 por %xmm0,%xmm4 pand 288(%r10),%xmm2 por %xmm1,%xmm5 pand 304(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%rdi),%xmm0 movdqa -48(%rdi),%xmm1 movdqa -32(%rdi),%xmm2 pand 320(%r10),%xmm0 movdqa -16(%rdi),%xmm3 pand 336(%r10),%xmm1 por %xmm0,%xmm4 pand 352(%r10),%xmm2 por %xmm1,%xmm5 pand 368(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%rdi),%xmm0 movdqa 16(%rdi),%xmm1 movdqa 32(%rdi),%xmm2 pand 384(%r10),%xmm0 movdqa 48(%rdi),%xmm3 pand 400(%r10),%xmm1 por %xmm0,%xmm4 pand 416(%r10),%xmm2 por %xmm1,%xmm5 pand 432(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%rdi),%xmm0 movdqa 80(%rdi),%xmm1 movdqa 96(%rdi),%xmm2 pand 448(%r10),%xmm0 movdqa 112(%rdi),%xmm3 pand 464(%r10),%xmm1 por %xmm0,%xmm4 pand 480(%r10),%xmm2 por %xmm1,%xmm5 pand 496(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%rdi),%rdi .byte 102,72,15,126,194 movq %rbp,(%rbx) leaq 32(%rbx,%rax,1),%rbx mulxq 0(%rsi),%r8,%r11 xorq %rbp,%rbp movq %rdx,%r9 mulxq 8(%rsi),%r14,%r12 adoxq -32(%rbx),%r8 adcxq %r14,%r11 mulxq 16(%rsi),%r15,%r13 adoxq -24(%rbx),%r11 adcxq %r15,%r12 mulxq 24(%rsi),%rdx,%r14 adoxq -16(%rbx),%r12 adcxq %rdx,%r13 leaq (%rcx,%rax,1),%rcx leaq 32(%rsi),%rsi adoxq -8(%rbx),%r13 adcxq %rbp,%r14 adoxq %rbp,%r14 movq %r8,%r15 imulq 32+8(%rsp),%r8 movq %r8,%rdx xorq %rbp,%rbp movq %rdi,8+8(%rsp) mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 adcxq 
%rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq 24+8(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r12 movq %r11,-24(%rbx) adoxq %rbp,%r15 movq %r12,-16(%rbx) leaq 32(%rcx),%rcx jmp L$mulx4x_inner .p2align 5 L$mulx4x_inner: mulxq 0(%rsi),%r10,%rax adcxq %rbp,%r15 adoxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq 0(%rbx),%r10 adoxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq 8(%rbx),%r11 adoxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 movq %r8,%rdx adcxq 16(%rbx),%r12 adoxq %rax,%r13 adcxq 24(%rbx),%r13 adoxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adcxq %rbp,%r14 adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 adoxq %r15,%r13 movq %r11,-32(%rbx) mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx leaq 32(%rcx),%rcx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 movq %r13,-16(%rbx) decq %rdi jnz L$mulx4x_inner movq 0+8(%rsp),%rax adcq %rbp,%r15 subq 0(%rbx),%rdi movq 8+8(%rsp),%rdi movq 16+8(%rsp),%r10 adcq %r15,%r14 leaq (%rsi,%rax,1),%rsi adcq %rbp,%rbp movq %r14,-8(%rbx) cmpq %r10,%rdi jb L$mulx4x_outer movq -8(%rcx),%r10 movq %rbp,%r8 movq (%rcx,%rax,1),%r12 leaq (%rcx,%rax,1),%rbp movq %rax,%rcx leaq (%rbx,%rax,1),%rdi xorl %eax,%eax xorq %r15,%r15 subq %r14,%r10 adcq %r15,%r15 orq %r15,%r8 sarq $3+2,%rcx subq %r8,%rax movq 56+8(%rsp),%rdx decq %r12 movq 8(%rbp),%r13 xorq %r8,%r8 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp L$sqrx4x_sub_entry .p2align 5 bn_powerx5: movq %rsp,%rax L$powerx5_enter: pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$powerx5_prologue: shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb L$pwrx_sp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp L$pwrx_sp_done .p2align 5 L$pwrx_sp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp L$pwrx_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$pwrx_page_walk jmp L$pwrx_page_walk_done L$pwrx_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$pwrx_page_walk L$pwrx_page_walk_done: movq %r9,%r10 negq %r9 pxor %xmm0,%xmm0 .byte 102,72,15,110,207 .byte 102,72,15,110,209 .byte 102,73,15,110,218 .byte 102,72,15,110,226 movq %r8,32(%rsp) movq %rax,40(%rsp) L$powerx5_body: call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal movq %r10,%r9 movq %rsi,%rdi .byte 102,72,15,126,209 .byte 102,72,15,126,226 movq 40(%rsp),%rax call mulx4x_internal movq 40(%rsp),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$powerx5_epilogue: .byte 0xf3,0xc3 .globl _bn_sqrx8x_internal .private_extern _bn_sqrx8x_internal .private_extern _bn_sqrx8x_internal .p2align 5 _bn_sqrx8x_internal: __bn_sqrx8x_internal: _CET_ENDBR leaq 48+8(%rsp),%rdi leaq (%rsi,%r9,1),%rbp movq %r9,0+8(%rsp) movq %rbp,8+8(%rsp) jmp L$sqr8x_zero_start .p2align 5 .byte 0x66,0x66,0x66,0x2e,0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00 L$sqrx8x_zero: .byte 0x3e movdqa %xmm0,0(%rdi) movdqa %xmm0,16(%rdi) movdqa %xmm0,32(%rdi) movdqa %xmm0,48(%rdi) 
L$sqr8x_zero_start: movdqa %xmm0,64(%rdi) movdqa %xmm0,80(%rdi) movdqa %xmm0,96(%rdi) movdqa %xmm0,112(%rdi) leaq 128(%rdi),%rdi subq $64,%r9 jnz L$sqrx8x_zero movq 0(%rsi),%rdx xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 leaq 48+8(%rsp),%rdi xorq %rbp,%rbp jmp L$sqrx8x_outer_loop .p2align 5 L$sqrx8x_outer_loop: mulxq 8(%rsi),%r8,%rax adcxq %r9,%r8 adoxq %rax,%r10 mulxq 16(%rsi),%r9,%rax adcxq %r10,%r9 adoxq %rax,%r11 .byte 0xc4,0xe2,0xab,0xf6,0x86,0x18,0x00,0x00,0x00 adcxq %r11,%r10 adoxq %rax,%r12 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x20,0x00,0x00,0x00 adcxq %r12,%r11 adoxq %rax,%r13 mulxq 40(%rsi),%r12,%rax adcxq %r13,%r12 adoxq %rax,%r14 mulxq 48(%rsi),%r13,%rax adcxq %r14,%r13 adoxq %r15,%rax mulxq 56(%rsi),%r14,%r15 movq 8(%rsi),%rdx adcxq %rax,%r14 adoxq %rbp,%r15 adcq 64(%rdi),%r15 movq %r8,8(%rdi) movq %r9,16(%rdi) sbbq %rcx,%rcx xorq %rbp,%rbp mulxq 16(%rsi),%r8,%rbx mulxq 24(%rsi),%r9,%rax adcxq %r10,%r8 adoxq %rbx,%r9 mulxq 32(%rsi),%r10,%rbx adcxq %r11,%r9 adoxq %rax,%r10 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x28,0x00,0x00,0x00 adcxq %r12,%r10 adoxq %rbx,%r11 .byte 0xc4,0xe2,0x9b,0xf6,0x9e,0x30,0x00,0x00,0x00 adcxq %r13,%r11 adoxq %r14,%r12 .byte 0xc4,0x62,0x93,0xf6,0xb6,0x38,0x00,0x00,0x00 movq 16(%rsi),%rdx adcxq %rax,%r12 adoxq %rbx,%r13 adcxq %r15,%r13 adoxq %rbp,%r14 adcxq %rbp,%r14 movq %r8,24(%rdi) movq %r9,32(%rdi) mulxq 24(%rsi),%r8,%rbx mulxq 32(%rsi),%r9,%rax adcxq %r10,%r8 adoxq %rbx,%r9 mulxq 40(%rsi),%r10,%rbx adcxq %r11,%r9 adoxq %rax,%r10 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x30,0x00,0x00,0x00 adcxq %r12,%r10 adoxq %r13,%r11 .byte 0xc4,0x62,0x9b,0xf6,0xae,0x38,0x00,0x00,0x00 .byte 0x3e movq 24(%rsi),%rdx adcxq %rbx,%r11 adoxq %rax,%r12 adcxq %r14,%r12 movq %r8,40(%rdi) movq %r9,48(%rdi) mulxq 32(%rsi),%r8,%rax adoxq %rbp,%r13 adcxq %rbp,%r13 mulxq 40(%rsi),%r9,%rbx adcxq %r10,%r8 adoxq %rax,%r9 mulxq 48(%rsi),%r10,%rax adcxq %r11,%r9 adoxq %r12,%r10 mulxq 56(%rsi),%r11,%r12 movq 32(%rsi),%rdx movq 40(%rsi),%r14 adcxq %rbx,%r10 adoxq %rax,%r11 movq 48(%rsi),%r15 adcxq %r13,%r11 adoxq %rbp,%r12 adcxq %rbp,%r12 movq %r8,56(%rdi) movq %r9,64(%rdi) mulxq %r14,%r9,%rax movq 56(%rsi),%r8 adcxq %r10,%r9 mulxq %r15,%r10,%rbx adoxq %rax,%r10 adcxq %r11,%r10 mulxq %r8,%r11,%rax movq %r14,%rdx adoxq %rbx,%r11 adcxq %r12,%r11 adcxq %rbp,%rax mulxq %r15,%r14,%rbx mulxq %r8,%r12,%r13 movq %r15,%rdx leaq 64(%rsi),%rsi adcxq %r14,%r11 adoxq %rbx,%r12 adcxq %rax,%r12 adoxq %rbp,%r13 .byte 0x67,0x67 mulxq %r8,%r8,%r14 adcxq %r8,%r13 adcxq %rbp,%r14 cmpq 8+8(%rsp),%rsi je L$sqrx8x_outer_break negq %rcx movq $-8,%rcx movq %rbp,%r15 movq 64(%rdi),%r8 adcxq 72(%rdi),%r9 adcxq 80(%rdi),%r10 adcxq 88(%rdi),%r11 adcq 96(%rdi),%r12 adcq 104(%rdi),%r13 adcq 112(%rdi),%r14 adcq 120(%rdi),%r15 leaq (%rsi),%rbp leaq 128(%rdi),%rdi sbbq %rax,%rax movq -64(%rsi),%rdx movq %rax,16+8(%rsp) movq %rdi,24+8(%rsp) xorl %eax,%eax jmp L$sqrx8x_loop .p2align 5 L$sqrx8x_loop: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rax,%rbx adoxq %r9,%r8 mulxq 8(%rbp),%rax,%r9 adcxq %rax,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rax,%r10 adcxq %rax,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00 adcxq %rax,%r11 adoxq %r13,%r12 mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 movq %rbx,(%rdi,%rcx,8) movl $0,%ebx adcxq %rax,%r13 adoxq %r15,%r14 .byte 0xc4,0x62,0xfb,0xf6,0xbd,0x38,0x00,0x00,0x00 movq 8(%rsi,%rcx,8),%rdx adcxq %rax,%r14 adoxq %rbx,%r15 adcxq %rbx,%r15 .byte 0x67 incq 
%rcx jnz L$sqrx8x_loop leaq 64(%rbp),%rbp movq $-8,%rcx cmpq 8+8(%rsp),%rbp je L$sqrx8x_break subq 16+8(%rsp),%rbx .byte 0x66 movq -64(%rsi),%rdx adcxq 0(%rdi),%r8 adcxq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi .byte 0x67 sbbq %rax,%rax xorl %ebx,%ebx movq %rax,16+8(%rsp) jmp L$sqrx8x_loop .p2align 5 L$sqrx8x_break: xorq %rbp,%rbp subq 16+8(%rsp),%rbx adcxq %rbp,%r8 movq 24+8(%rsp),%rcx adcxq %rbp,%r9 movq 0(%rsi),%rdx adcq $0,%r10 movq %r8,0(%rdi) adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 cmpq %rcx,%rdi je L$sqrx8x_outer_loop movq %r9,8(%rdi) movq 8(%rcx),%r9 movq %r10,16(%rdi) movq 16(%rcx),%r10 movq %r11,24(%rdi) movq 24(%rcx),%r11 movq %r12,32(%rdi) movq 32(%rcx),%r12 movq %r13,40(%rdi) movq 40(%rcx),%r13 movq %r14,48(%rdi) movq 48(%rcx),%r14 movq %r15,56(%rdi) movq 56(%rcx),%r15 movq %rcx,%rdi jmp L$sqrx8x_outer_loop .p2align 5 L$sqrx8x_outer_break: movq %r9,72(%rdi) .byte 102,72,15,126,217 movq %r10,80(%rdi) movq %r11,88(%rdi) movq %r12,96(%rdi) movq %r13,104(%rdi) movq %r14,112(%rdi) leaq 48+8(%rsp),%rdi movq (%rsi,%rcx,1),%rdx movq 8(%rdi),%r11 xorq %r10,%r10 movq 0+8(%rsp),%r9 adoxq %r11,%r11 movq 16(%rdi),%r12 movq 24(%rdi),%r13 .p2align 5 L$sqrx4x_shift_n_add: mulxq %rdx,%rax,%rbx adoxq %r12,%r12 adcxq %r10,%rax .byte 0x48,0x8b,0x94,0x0e,0x08,0x00,0x00,0x00 .byte 0x4c,0x8b,0x97,0x20,0x00,0x00,0x00 adoxq %r13,%r13 adcxq %r11,%rbx movq 40(%rdi),%r11 movq %rax,0(%rdi) movq %rbx,8(%rdi) mulxq %rdx,%rax,%rbx adoxq %r10,%r10 adcxq %r12,%rax movq 16(%rsi,%rcx,1),%rdx movq 48(%rdi),%r12 adoxq %r11,%r11 adcxq %r13,%rbx movq 56(%rdi),%r13 movq %rax,16(%rdi) movq %rbx,24(%rdi) mulxq %rdx,%rax,%rbx adoxq %r12,%r12 adcxq %r10,%rax movq 24(%rsi,%rcx,1),%rdx leaq 32(%rcx),%rcx movq 64(%rdi),%r10 adoxq %r13,%r13 adcxq %r11,%rbx movq 72(%rdi),%r11 movq %rax,32(%rdi) movq %rbx,40(%rdi) mulxq %rdx,%rax,%rbx adoxq %r10,%r10 adcxq %r12,%rax jrcxz L$sqrx4x_shift_n_add_break .byte 0x48,0x8b,0x94,0x0e,0x00,0x00,0x00,0x00 adoxq %r11,%r11 adcxq %r13,%rbx movq 80(%rdi),%r12 movq 88(%rdi),%r13 movq %rax,48(%rdi) movq %rbx,56(%rdi) leaq 64(%rdi),%rdi nop jmp L$sqrx4x_shift_n_add .p2align 5 L$sqrx4x_shift_n_add_break: adcxq %r13,%rbx movq %rax,48(%rdi) movq %rbx,56(%rdi) leaq 64(%rdi),%rdi .byte 102,72,15,126,213 __bn_sqrx8x_reduction: xorl %eax,%eax movq 32+8(%rsp),%rbx movq 48+8(%rsp),%rdx leaq -64(%rbp,%r9,1),%rcx movq %rcx,0+8(%rsp) movq %rdi,8+8(%rsp) leaq 48+8(%rsp),%rdi jmp L$sqrx8x_reduction_loop .p2align 5 L$sqrx8x_reduction_loop: movq 8(%rdi),%r9 movq 16(%rdi),%r10 movq 24(%rdi),%r11 movq 32(%rdi),%r12 movq %rdx,%r8 imulq %rbx,%rdx movq 40(%rdi),%r13 movq 48(%rdi),%r14 movq 56(%rdi),%r15 movq %rax,24+8(%rsp) leaq 64(%rdi),%rdi xorq %rsi,%rsi movq $-8,%rcx jmp L$sqrx8x_reduce .p2align 5 L$sqrx8x_reduce: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rbx,%rax adoxq %r9,%r8 mulxq 8(%rbp),%rbx,%r9 adcxq %rbx,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rbx,%r10 adcxq %rbx,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rbx,%r11 adcxq %rbx,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xe3,0xf6,0xa5,0x20,0x00,0x00,0x00 movq %rdx,%rax movq %r8,%rdx adcxq %rbx,%r11 adoxq %r13,%r12 mulxq 32+8(%rsp),%rbx,%rdx movq %rax,%rdx movq %rax,64+48+8(%rsp,%rcx,8) mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 adcxq %rax,%r13 adoxq %r15,%r14 mulxq 56(%rbp),%rax,%r15 movq %rbx,%rdx adcxq %rax,%r14 adoxq %rsi,%r15 adcxq %rsi,%r15 .byte 0x67,0x67,0x67 incq %rcx jnz L$sqrx8x_reduce movq 
%rsi,%rax cmpq 0+8(%rsp),%rbp jae L$sqrx8x_no_tail movq 48+8(%rsp),%rdx addq 0(%rdi),%r8 leaq 64(%rbp),%rbp movq $-8,%rcx adcxq 8(%rdi),%r9 adcxq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi sbbq %rax,%rax xorq %rsi,%rsi movq %rax,16+8(%rsp) jmp L$sqrx8x_tail .p2align 5 L$sqrx8x_tail: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rax,%rbx adoxq %r9,%r8 mulxq 8(%rbp),%rax,%r9 adcxq %rax,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rax,%r10 adcxq %rax,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00 adcxq %rax,%r11 adoxq %r13,%r12 mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 adcxq %rax,%r13 adoxq %r15,%r14 mulxq 56(%rbp),%rax,%r15 movq 72+48+8(%rsp,%rcx,8),%rdx adcxq %rax,%r14 adoxq %rsi,%r15 movq %rbx,(%rdi,%rcx,8) movq %r8,%rbx adcxq %rsi,%r15 incq %rcx jnz L$sqrx8x_tail cmpq 0+8(%rsp),%rbp jae L$sqrx8x_tail_done subq 16+8(%rsp),%rsi movq 48+8(%rsp),%rdx leaq 64(%rbp),%rbp adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi sbbq %rax,%rax subq $8,%rcx xorq %rsi,%rsi movq %rax,16+8(%rsp) jmp L$sqrx8x_tail .p2align 5 L$sqrx8x_tail_done: xorq %rax,%rax addq 24+8(%rsp),%r8 adcq $0,%r9 adcq $0,%r10 adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 adcq $0,%rax subq 16+8(%rsp),%rsi L$sqrx8x_no_tail: adcq 0(%rdi),%r8 .byte 102,72,15,126,217 adcq 8(%rdi),%r9 movq 56(%rbp),%rsi .byte 102,72,15,126,213 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 adcq $0,%rax movq 32+8(%rsp),%rbx movq 64(%rdi,%rcx,1),%rdx movq %r8,0(%rdi) leaq 64(%rdi),%r8 movq %r9,8(%rdi) movq %r10,16(%rdi) movq %r11,24(%rdi) movq %r12,32(%rdi) movq %r13,40(%rdi) movq %r14,48(%rdi) movq %r15,56(%rdi) leaq 64(%rdi,%rcx,1),%rdi cmpq 8+8(%rsp),%r8 jb L$sqrx8x_reduction_loop .byte 0xf3,0xc3 .p2align 5 __bn_postx4x_internal: movq 0(%rbp),%r12 movq %rcx,%r10 movq %rcx,%r9 negq %rax sarq $3+2,%rcx .byte 102,72,15,126,202 .byte 102,72,15,126,206 decq %r12 movq 8(%rbp),%r13 xorq %r8,%r8 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp L$sqrx4x_sub_entry .p2align 4 L$sqrx4x_sub: movq 0(%rbp),%r12 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 L$sqrx4x_sub_entry: andnq %rax,%r12,%r12 leaq 32(%rbp),%rbp andnq %rax,%r13,%r13 andnq %rax,%r14,%r14 andnq %rax,%r15,%r15 negq %r8 adcq 0(%rdi),%r12 adcq 8(%rdi),%r13 adcq 16(%rdi),%r14 adcq 24(%rdi),%r15 movq %r12,0(%rdx) leaq 32(%rdi),%rdi movq %r13,8(%rdx) sbbq %r8,%r8 movq %r14,16(%rdx) movq %r15,24(%rdx) leaq 32(%rdx),%rdx incq %rcx jnz L$sqrx4x_sub negq %r9 .byte 0xf3,0xc3 #endif .globl _bn_scatter5 .private_extern _bn_scatter5 .p2align 4 _bn_scatter5: _CET_ENDBR cmpl $0,%esi jz L$scatter_epilogue leaq (%rdx,%rcx,8),%rdx L$scatter: movq (%rdi),%rax leaq 8(%rdi),%rdi movq %rax,(%rdx) leaq 256(%rdx),%rdx subl $1,%esi jnz L$scatter L$scatter_epilogue: .byte 0xf3,0xc3 .globl _bn_gather5 .private_extern _bn_gather5 .p2align 5 _bn_gather5: L$SEH_begin_bn_gather5: _CET_ENDBR .byte 0x4c,0x8d,0x14,0x24 .byte 0x48,0x81,0xec,0x08,0x01,0x00,0x00 leaq L$inc(%rip),%rax andq $-16,%rsp movd %ecx,%xmm5 movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 128(%rdx),%r11 leaq 128(%rsp),%rax pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd 
%xmm5,%xmm1 movdqa %xmm0,-128(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,-112(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,-96(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,-80(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,-64(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,-48(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,-32(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,-16(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,0(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,16(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,32(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,48(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,64(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,80(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,96(%rax) movdqa %xmm4,%xmm2 movdqa %xmm3,112(%rax) jmp L$gather .p2align 5 L$gather: pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r11),%xmm0 movdqa -112(%r11),%xmm1 movdqa -96(%r11),%xmm2 pand -128(%rax),%xmm0 movdqa -80(%r11),%xmm3 pand -112(%rax),%xmm1 por %xmm0,%xmm4 pand -96(%rax),%xmm2 por %xmm1,%xmm5 pand -80(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r11),%xmm0 movdqa -48(%r11),%xmm1 movdqa -32(%r11),%xmm2 pand -64(%rax),%xmm0 movdqa -16(%r11),%xmm3 pand -48(%rax),%xmm1 por %xmm0,%xmm4 pand -32(%rax),%xmm2 por %xmm1,%xmm5 pand -16(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r11),%xmm0 movdqa 16(%r11),%xmm1 movdqa 32(%r11),%xmm2 pand 0(%rax),%xmm0 movdqa 48(%r11),%xmm3 pand 16(%rax),%xmm1 por %xmm0,%xmm4 pand 32(%rax),%xmm2 por %xmm1,%xmm5 pand 48(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%r11),%xmm0 movdqa 80(%r11),%xmm1 movdqa 96(%r11),%xmm2 pand 64(%rax),%xmm0 movdqa 112(%r11),%xmm3 pand 80(%rax),%xmm1 por %xmm0,%xmm4 pand 96(%rax),%xmm2 por %xmm1,%xmm5 pand 112(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 leaq 256(%r11),%r11 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 movq %xmm0,(%rdi) leaq 8(%rdi),%rdi subl $1,%esi jnz L$gather leaq (%r10),%rsp .byte 0xf3,0xc3 L$SEH_end_bn_gather5: .section __DATA,__const .p2align 6 L$inc: .long 0,0, 1,1 .long 2,2, 2,2 .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,119,105,116,104,32,115,99,97,116,116,101,114,47,103,97,116,104,101,114,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text #endif
marvin-hansen/iggy-streaming-system
38,151
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/rsaz-avx2.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _rsaz_1024_sqr_avx2 .private_extern _rsaz_1024_sqr_avx2 .p2align 6 _rsaz_1024_sqr_avx2: _CET_ENDBR leaq (%rsp),%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 vzeroupper movq %rax,%rbp movq %rdx,%r13 subq $832,%rsp movq %r13,%r15 subq $-128,%rdi subq $-128,%rsi subq $-128,%r13 andq $4095,%r15 addq $320,%r15 shrq $12,%r15 vpxor %ymm9,%ymm9,%ymm9 jz L$sqr_1024_no_n_copy subq $320,%rsp vmovdqu 0-128(%r13),%ymm0 andq $-2048,%rsp vmovdqu 32-128(%r13),%ymm1 vmovdqu 64-128(%r13),%ymm2 vmovdqu 96-128(%r13),%ymm3 vmovdqu 128-128(%r13),%ymm4 vmovdqu 160-128(%r13),%ymm5 vmovdqu 192-128(%r13),%ymm6 vmovdqu 224-128(%r13),%ymm7 vmovdqu 256-128(%r13),%ymm8 leaq 832+128(%rsp),%r13 vmovdqu %ymm0,0-128(%r13) vmovdqu %ymm1,32-128(%r13) vmovdqu %ymm2,64-128(%r13) vmovdqu %ymm3,96-128(%r13) vmovdqu %ymm4,128-128(%r13) vmovdqu %ymm5,160-128(%r13) vmovdqu %ymm6,192-128(%r13) vmovdqu %ymm7,224-128(%r13) vmovdqu %ymm8,256-128(%r13) vmovdqu %ymm9,288-128(%r13) L$sqr_1024_no_n_copy: andq $-1024,%rsp vmovdqu 32-128(%rsi),%ymm1 vmovdqu 64-128(%rsi),%ymm2 vmovdqu 96-128(%rsi),%ymm3 vmovdqu 128-128(%rsi),%ymm4 vmovdqu 160-128(%rsi),%ymm5 vmovdqu 192-128(%rsi),%ymm6 vmovdqu 224-128(%rsi),%ymm7 vmovdqu 256-128(%rsi),%ymm8 leaq 192(%rsp),%rbx vmovdqu L$and_mask(%rip),%ymm15 jmp L$OOP_GRANDE_SQR_1024 .p2align 5 L$OOP_GRANDE_SQR_1024: leaq 576+128(%rsp),%r9 leaq 448(%rsp),%r12 vpaddq %ymm1,%ymm1,%ymm1 vpbroadcastq 0-128(%rsi),%ymm10 vpaddq %ymm2,%ymm2,%ymm2 vmovdqa %ymm1,0-128(%r9) vpaddq %ymm3,%ymm3,%ymm3 vmovdqa %ymm2,32-128(%r9) vpaddq %ymm4,%ymm4,%ymm4 vmovdqa %ymm3,64-128(%r9) vpaddq %ymm5,%ymm5,%ymm5 vmovdqa %ymm4,96-128(%r9) vpaddq %ymm6,%ymm6,%ymm6 vmovdqa %ymm5,128-128(%r9) vpaddq %ymm7,%ymm7,%ymm7 vmovdqa %ymm6,160-128(%r9) vpaddq %ymm8,%ymm8,%ymm8 vmovdqa %ymm7,192-128(%r9) vpxor %ymm9,%ymm9,%ymm9 vmovdqa %ymm8,224-128(%r9) vpmuludq 0-128(%rsi),%ymm10,%ymm0 vpbroadcastq 32-128(%rsi),%ymm11 vmovdqu %ymm9,288-192(%rbx) vpmuludq %ymm10,%ymm1,%ymm1 vmovdqu %ymm9,320-448(%r12) vpmuludq %ymm10,%ymm2,%ymm2 vmovdqu %ymm9,352-448(%r12) vpmuludq %ymm10,%ymm3,%ymm3 vmovdqu %ymm9,384-448(%r12) vpmuludq %ymm10,%ymm4,%ymm4 vmovdqu %ymm9,416-448(%r12) vpmuludq %ymm10,%ymm5,%ymm5 vmovdqu %ymm9,448-448(%r12) vpmuludq %ymm10,%ymm6,%ymm6 vmovdqu %ymm9,480-448(%r12) vpmuludq %ymm10,%ymm7,%ymm7 vmovdqu %ymm9,512-448(%r12) vpmuludq %ymm10,%ymm8,%ymm8 vpbroadcastq 64-128(%rsi),%ymm10 vmovdqu %ymm9,544-448(%r12) movq %rsi,%r15 movl $4,%r14d jmp L$sqr_entry_1024 .p2align 5 L$OOP_SQR_1024: vpbroadcastq 32-128(%r15),%ymm11 vpmuludq 0-128(%rsi),%ymm10,%ymm0 vpaddq 0-192(%rbx),%ymm0,%ymm0 vpmuludq 0-128(%r9),%ymm10,%ymm1 vpaddq 32-192(%rbx),%ymm1,%ymm1 vpmuludq 32-128(%r9),%ymm10,%ymm2 vpaddq 64-192(%rbx),%ymm2,%ymm2 vpmuludq 64-128(%r9),%ymm10,%ymm3 vpaddq 96-192(%rbx),%ymm3,%ymm3 vpmuludq 96-128(%r9),%ymm10,%ymm4 vpaddq 128-192(%rbx),%ymm4,%ymm4 vpmuludq 128-128(%r9),%ymm10,%ymm5 vpaddq 160-192(%rbx),%ymm5,%ymm5 vpmuludq 160-128(%r9),%ymm10,%ymm6 vpaddq 192-192(%rbx),%ymm6,%ymm6 vpmuludq 192-128(%r9),%ymm10,%ymm7 vpaddq 224-192(%rbx),%ymm7,%ymm7 vpmuludq 224-128(%r9),%ymm10,%ymm8 vpbroadcastq 64-128(%r15),%ymm10 vpaddq 256-192(%rbx),%ymm8,%ymm8 L$sqr_entry_1024: vmovdqu %ymm0,0-192(%rbx) vmovdqu %ymm1,32-192(%rbx) vpmuludq 32-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 
32-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm3,%ymm3 vpmuludq 64-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 96-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 128-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq 160-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 192-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 224-128(%r9),%ymm11,%ymm0 vpbroadcastq 96-128(%r15),%ymm11 vpaddq 288-192(%rbx),%ymm0,%ymm0 vmovdqu %ymm2,64-192(%rbx) vmovdqu %ymm3,96-192(%rbx) vpmuludq 64-128(%rsi),%ymm10,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 64-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 96-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq 128-128(%r9),%ymm10,%ymm13 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 160-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 192-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm0,%ymm0 vpmuludq 224-128(%r9),%ymm10,%ymm1 vpbroadcastq 128-128(%r15),%ymm10 vpaddq 320-448(%r12),%ymm1,%ymm1 vmovdqu %ymm4,128-192(%rbx) vmovdqu %ymm5,160-192(%rbx) vpmuludq 96-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm6,%ymm6 vpmuludq 96-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm7,%ymm7 vpmuludq 128-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm8,%ymm8 vpmuludq 160-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm0,%ymm0 vpmuludq 192-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm1,%ymm1 vpmuludq 224-128(%r9),%ymm11,%ymm2 vpbroadcastq 160-128(%r15),%ymm11 vpaddq 352-448(%r12),%ymm2,%ymm2 vmovdqu %ymm6,192-192(%rbx) vmovdqu %ymm7,224-192(%rbx) vpmuludq 128-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 128-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm0,%ymm0 vpmuludq 160-128(%r9),%ymm10,%ymm13 vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 192-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 224-128(%r9),%ymm10,%ymm3 vpbroadcastq 192-128(%r15),%ymm10 vpaddq 384-448(%r12),%ymm3,%ymm3 vmovdqu %ymm8,256-192(%rbx) vmovdqu %ymm0,288-192(%rbx) leaq 8(%rbx),%rbx vpmuludq 160-128(%rsi),%ymm11,%ymm13 vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 160-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 192-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm3,%ymm3 vpmuludq 224-128(%r9),%ymm11,%ymm4 vpbroadcastq 224-128(%r15),%ymm11 vpaddq 416-448(%r12),%ymm4,%ymm4 vmovdqu %ymm1,320-448(%r12) vmovdqu %ymm2,352-448(%r12) vpmuludq 192-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm3,%ymm3 vpmuludq 192-128(%r9),%ymm10,%ymm14 vpbroadcastq 256-128(%r15),%ymm0 vpaddq %ymm14,%ymm4,%ymm4 vpmuludq 224-128(%r9),%ymm10,%ymm5 vpbroadcastq 0+8-128(%r15),%ymm10 vpaddq 448-448(%r12),%ymm5,%ymm5 vmovdqu %ymm3,384-448(%r12) vmovdqu %ymm4,416-448(%r12) leaq 8(%r15),%r15 vpmuludq 224-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 224-128(%r9),%ymm11,%ymm6 vpaddq 480-448(%r12),%ymm6,%ymm6 vpmuludq 256-128(%rsi),%ymm0,%ymm7 vmovdqu %ymm5,448-448(%r12) vpaddq 512-448(%r12),%ymm7,%ymm7 vmovdqu %ymm6,480-448(%r12) vmovdqu %ymm7,512-448(%r12) leaq 8(%r12),%r12 decl %r14d jnz L$OOP_SQR_1024 vmovdqu 256(%rsp),%ymm8 vmovdqu 288(%rsp),%ymm1 vmovdqu 320(%rsp),%ymm2 leaq 192(%rsp),%rbx vpsrlq $29,%ymm8,%ymm14 vpand %ymm15,%ymm8,%ymm8 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpermq $0x93,%ymm14,%ymm14 vpxor %ymm9,%ymm9,%ymm9 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm9,%ymm14,%ymm10 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm8,%ymm8 vpblendd $3,%ymm11,%ymm9,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vpaddq %ymm11,%ymm2,%ymm2 vmovdqu %ymm1,288-192(%rbx) vmovdqu %ymm2,320-192(%rbx) movq (%rsp),%rax movq 8(%rsp),%r10 movq 16(%rsp),%r11 movq 24(%rsp),%r12 vmovdqu 32(%rsp),%ymm1 vmovdqu 
64-192(%rbx),%ymm2 vmovdqu 96-192(%rbx),%ymm3 vmovdqu 128-192(%rbx),%ymm4 vmovdqu 160-192(%rbx),%ymm5 vmovdqu 192-192(%rbx),%ymm6 vmovdqu 224-192(%rbx),%ymm7 movq %rax,%r9 imull %ecx,%eax andl $0x1fffffff,%eax vmovd %eax,%xmm12 movq %rax,%rdx imulq -128(%r13),%rax vpbroadcastq %xmm12,%ymm12 addq %rax,%r9 movq %rdx,%rax imulq 8-128(%r13),%rax shrq $29,%r9 addq %rax,%r10 movq %rdx,%rax imulq 16-128(%r13),%rax addq %r9,%r10 addq %rax,%r11 imulq 24-128(%r13),%rdx addq %rdx,%r12 movq %r10,%rax imull %ecx,%eax andl $0x1fffffff,%eax movl $9,%r14d jmp L$OOP_REDUCE_1024 .p2align 5 L$OOP_REDUCE_1024: vmovd %eax,%xmm13 vpbroadcastq %xmm13,%ymm13 vpmuludq 32-128(%r13),%ymm12,%ymm10 movq %rax,%rdx imulq -128(%r13),%rax vpaddq %ymm10,%ymm1,%ymm1 addq %rax,%r10 vpmuludq 64-128(%r13),%ymm12,%ymm14 movq %rdx,%rax imulq 8-128(%r13),%rax vpaddq %ymm14,%ymm2,%ymm2 vpmuludq 96-128(%r13),%ymm12,%ymm11 .byte 0x67 addq %rax,%r11 .byte 0x67 movq %rdx,%rax imulq 16-128(%r13),%rax shrq $29,%r10 vpaddq %ymm11,%ymm3,%ymm3 vpmuludq 128-128(%r13),%ymm12,%ymm10 addq %rax,%r12 addq %r10,%r11 vpaddq %ymm10,%ymm4,%ymm4 vpmuludq 160-128(%r13),%ymm12,%ymm14 movq %r11,%rax imull %ecx,%eax vpaddq %ymm14,%ymm5,%ymm5 vpmuludq 192-128(%r13),%ymm12,%ymm11 andl $0x1fffffff,%eax vpaddq %ymm11,%ymm6,%ymm6 vpmuludq 224-128(%r13),%ymm12,%ymm10 vpaddq %ymm10,%ymm7,%ymm7 vpmuludq 256-128(%r13),%ymm12,%ymm14 vmovd %eax,%xmm12 vpaddq %ymm14,%ymm8,%ymm8 vpbroadcastq %xmm12,%ymm12 vpmuludq 32-8-128(%r13),%ymm13,%ymm11 vmovdqu 96-8-128(%r13),%ymm14 movq %rax,%rdx imulq -128(%r13),%rax vpaddq %ymm11,%ymm1,%ymm1 vpmuludq 64-8-128(%r13),%ymm13,%ymm10 vmovdqu 128-8-128(%r13),%ymm11 addq %rax,%r11 movq %rdx,%rax imulq 8-128(%r13),%rax vpaddq %ymm10,%ymm2,%ymm2 addq %r12,%rax shrq $29,%r11 vpmuludq %ymm13,%ymm14,%ymm14 vmovdqu 160-8-128(%r13),%ymm10 addq %r11,%rax vpaddq %ymm14,%ymm3,%ymm3 vpmuludq %ymm13,%ymm11,%ymm11 vmovdqu 192-8-128(%r13),%ymm14 .byte 0x67 movq %rax,%r12 imull %ecx,%eax vpaddq %ymm11,%ymm4,%ymm4 vpmuludq %ymm13,%ymm10,%ymm10 .byte 0xc4,0x41,0x7e,0x6f,0x9d,0x58,0x00,0x00,0x00 andl $0x1fffffff,%eax vpaddq %ymm10,%ymm5,%ymm5 vpmuludq %ymm13,%ymm14,%ymm14 vmovdqu 256-8-128(%r13),%ymm10 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq %ymm13,%ymm11,%ymm11 vmovdqu 288-8-128(%r13),%ymm9 vmovd %eax,%xmm0 imulq -128(%r13),%rax vpaddq %ymm11,%ymm7,%ymm7 vpmuludq %ymm13,%ymm10,%ymm10 vmovdqu 32-16-128(%r13),%ymm14 vpbroadcastq %xmm0,%ymm0 vpaddq %ymm10,%ymm8,%ymm8 vpmuludq %ymm13,%ymm9,%ymm9 vmovdqu 64-16-128(%r13),%ymm11 addq %rax,%r12 vmovdqu 32-24-128(%r13),%ymm13 vpmuludq %ymm12,%ymm14,%ymm14 vmovdqu 96-16-128(%r13),%ymm10 vpaddq %ymm14,%ymm1,%ymm1 vpmuludq %ymm0,%ymm13,%ymm13 vpmuludq %ymm12,%ymm11,%ymm11 .byte 0xc4,0x41,0x7e,0x6f,0xb5,0xf0,0xff,0xff,0xff vpaddq %ymm1,%ymm13,%ymm13 vpaddq %ymm11,%ymm2,%ymm2 vpmuludq %ymm12,%ymm10,%ymm10 vmovdqu 160-16-128(%r13),%ymm11 .byte 0x67 vmovq %xmm13,%rax vmovdqu %ymm13,(%rsp) vpaddq %ymm10,%ymm3,%ymm3 vpmuludq %ymm12,%ymm14,%ymm14 vmovdqu 192-16-128(%r13),%ymm10 vpaddq %ymm14,%ymm4,%ymm4 vpmuludq %ymm12,%ymm11,%ymm11 vmovdqu 224-16-128(%r13),%ymm14 vpaddq %ymm11,%ymm5,%ymm5 vpmuludq %ymm12,%ymm10,%ymm10 vmovdqu 256-16-128(%r13),%ymm11 vpaddq %ymm10,%ymm6,%ymm6 vpmuludq %ymm12,%ymm14,%ymm14 shrq $29,%r12 vmovdqu 288-16-128(%r13),%ymm10 addq %r12,%rax vpaddq %ymm14,%ymm7,%ymm7 vpmuludq %ymm12,%ymm11,%ymm11 movq %rax,%r9 imull %ecx,%eax vpaddq %ymm11,%ymm8,%ymm8 vpmuludq %ymm12,%ymm10,%ymm10 andl $0x1fffffff,%eax vmovd %eax,%xmm12 vmovdqu 96-24-128(%r13),%ymm11 .byte 0x67 vpaddq %ymm10,%ymm9,%ymm9 
vpbroadcastq %xmm12,%ymm12 vpmuludq 64-24-128(%r13),%ymm0,%ymm14 vmovdqu 128-24-128(%r13),%ymm10 movq %rax,%rdx imulq -128(%r13),%rax movq 8(%rsp),%r10 vpaddq %ymm14,%ymm2,%ymm1 vpmuludq %ymm0,%ymm11,%ymm11 vmovdqu 160-24-128(%r13),%ymm14 addq %rax,%r9 movq %rdx,%rax imulq 8-128(%r13),%rax .byte 0x67 shrq $29,%r9 movq 16(%rsp),%r11 vpaddq %ymm11,%ymm3,%ymm2 vpmuludq %ymm0,%ymm10,%ymm10 vmovdqu 192-24-128(%r13),%ymm11 addq %rax,%r10 movq %rdx,%rax imulq 16-128(%r13),%rax vpaddq %ymm10,%ymm4,%ymm3 vpmuludq %ymm0,%ymm14,%ymm14 vmovdqu 224-24-128(%r13),%ymm10 imulq 24-128(%r13),%rdx addq %rax,%r11 leaq (%r9,%r10,1),%rax vpaddq %ymm14,%ymm5,%ymm4 vpmuludq %ymm0,%ymm11,%ymm11 vmovdqu 256-24-128(%r13),%ymm14 movq %rax,%r10 imull %ecx,%eax vpmuludq %ymm0,%ymm10,%ymm10 vpaddq %ymm11,%ymm6,%ymm5 vmovdqu 288-24-128(%r13),%ymm11 andl $0x1fffffff,%eax vpaddq %ymm10,%ymm7,%ymm6 vpmuludq %ymm0,%ymm14,%ymm14 addq 24(%rsp),%rdx vpaddq %ymm14,%ymm8,%ymm7 vpmuludq %ymm0,%ymm11,%ymm11 vpaddq %ymm11,%ymm9,%ymm8 vmovq %r12,%xmm9 movq %rdx,%r12 decl %r14d jnz L$OOP_REDUCE_1024 leaq 448(%rsp),%r12 vpaddq %ymm9,%ymm13,%ymm0 vpxor %ymm9,%ymm9,%ymm9 vpaddq 288-192(%rbx),%ymm0,%ymm0 vpaddq 320-448(%r12),%ymm1,%ymm1 vpaddq 352-448(%r12),%ymm2,%ymm2 vpaddq 384-448(%r12),%ymm3,%ymm3 vpaddq 416-448(%r12),%ymm4,%ymm4 vpaddq 448-448(%r12),%ymm5,%ymm5 vpaddq 480-448(%r12),%ymm6,%ymm6 vpaddq 512-448(%r12),%ymm7,%ymm7 vpaddq 544-448(%r12),%ymm8,%ymm8 vpsrlq $29,%ymm0,%ymm14 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm12,%ymm12 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm0,%ymm0 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm2,%ymm2 vpblendd $3,%ymm13,%ymm9,%ymm13 vpaddq %ymm12,%ymm3,%ymm3 vpaddq %ymm13,%ymm4,%ymm4 vpsrlq $29,%ymm0,%ymm14 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm12,%ymm12 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm0,%ymm0 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vmovdqu %ymm0,0-128(%rdi) vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm2,%ymm2 vmovdqu %ymm1,32-128(%rdi) vpblendd $3,%ymm13,%ymm9,%ymm13 vpaddq %ymm12,%ymm3,%ymm3 vmovdqu %ymm2,64-128(%rdi) vpaddq %ymm13,%ymm4,%ymm4 vmovdqu %ymm3,96-128(%rdi) vpsrlq $29,%ymm4,%ymm14 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm11 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm4,%ymm4 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm5,%ymm5 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm6,%ymm6 vpblendd $3,%ymm13,%ymm0,%ymm13 vpaddq %ymm12,%ymm7,%ymm7 vpaddq %ymm13,%ymm8,%ymm8 vpsrlq $29,%ymm4,%ymm14 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm11 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm12 vpermq 
$0x93,%ymm14,%ymm14 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm4,%ymm4 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm5,%ymm5 vmovdqu %ymm4,128-128(%rdi) vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm6,%ymm6 vmovdqu %ymm5,160-128(%rdi) vpblendd $3,%ymm13,%ymm0,%ymm13 vpaddq %ymm12,%ymm7,%ymm7 vmovdqu %ymm6,192-128(%rdi) vpaddq %ymm13,%ymm8,%ymm8 vmovdqu %ymm7,224-128(%rdi) vmovdqu %ymm8,256-128(%rdi) movq %rdi,%rsi decl %r8d jne L$OOP_GRANDE_SQR_1024 vzeroall movq %rbp,%rax movq -48(%rax),%r15 movq -40(%rax),%r14 movq -32(%rax),%r13 movq -24(%rax),%r12 movq -16(%rax),%rbp movq -8(%rax),%rbx leaq (%rax),%rsp L$sqr_1024_epilogue: .byte 0xf3,0xc3 .globl _rsaz_1024_mul_avx2 .private_extern _rsaz_1024_mul_avx2 .p2align 6 _rsaz_1024_mul_avx2: _CET_ENDBR leaq (%rsp),%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 movq %rax,%rbp vzeroall movq %rdx,%r13 subq $64,%rsp .byte 0x67,0x67 movq %rsi,%r15 andq $4095,%r15 addq $320,%r15 shrq $12,%r15 movq %rsi,%r15 cmovnzq %r13,%rsi cmovnzq %r15,%r13 movq %rcx,%r15 subq $-128,%rsi subq $-128,%rcx subq $-128,%rdi andq $4095,%r15 addq $320,%r15 .byte 0x67,0x67 shrq $12,%r15 jz L$mul_1024_no_n_copy subq $320,%rsp vmovdqu 0-128(%rcx),%ymm0 andq $-512,%rsp vmovdqu 32-128(%rcx),%ymm1 vmovdqu 64-128(%rcx),%ymm2 vmovdqu 96-128(%rcx),%ymm3 vmovdqu 128-128(%rcx),%ymm4 vmovdqu 160-128(%rcx),%ymm5 vmovdqu 192-128(%rcx),%ymm6 vmovdqu 224-128(%rcx),%ymm7 vmovdqu 256-128(%rcx),%ymm8 leaq 64+128(%rsp),%rcx vmovdqu %ymm0,0-128(%rcx) vpxor %ymm0,%ymm0,%ymm0 vmovdqu %ymm1,32-128(%rcx) vpxor %ymm1,%ymm1,%ymm1 vmovdqu %ymm2,64-128(%rcx) vpxor %ymm2,%ymm2,%ymm2 vmovdqu %ymm3,96-128(%rcx) vpxor %ymm3,%ymm3,%ymm3 vmovdqu %ymm4,128-128(%rcx) vpxor %ymm4,%ymm4,%ymm4 vmovdqu %ymm5,160-128(%rcx) vpxor %ymm5,%ymm5,%ymm5 vmovdqu %ymm6,192-128(%rcx) vpxor %ymm6,%ymm6,%ymm6 vmovdqu %ymm7,224-128(%rcx) vpxor %ymm7,%ymm7,%ymm7 vmovdqu %ymm8,256-128(%rcx) vmovdqa %ymm0,%ymm8 vmovdqu %ymm9,288-128(%rcx) L$mul_1024_no_n_copy: andq $-64,%rsp movq (%r13),%rbx vpbroadcastq (%r13),%ymm10 vmovdqu %ymm0,(%rsp) xorq %r9,%r9 .byte 0x67 xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 vmovdqu L$and_mask(%rip),%ymm15 movl $9,%r14d vmovdqu %ymm9,288-128(%rdi) jmp L$oop_mul_1024 .p2align 5 L$oop_mul_1024: vpsrlq $29,%ymm3,%ymm9 movq %rbx,%rax imulq -128(%rsi),%rax addq %r9,%rax movq %rbx,%r10 imulq 8-128(%rsi),%r10 addq 8(%rsp),%r10 movq %rax,%r9 imull %r8d,%eax andl $0x1fffffff,%eax movq %rbx,%r11 imulq 16-128(%rsi),%r11 addq 16(%rsp),%r11 movq %rbx,%r12 imulq 24-128(%rsi),%r12 addq 24(%rsp),%r12 vpmuludq 32-128(%rsi),%ymm10,%ymm0 vmovd %eax,%xmm11 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq 64-128(%rsi),%ymm10,%ymm12 vpbroadcastq %xmm11,%ymm11 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 96-128(%rsi),%ymm10,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq 128-128(%rsi),%ymm10,%ymm0 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq 160-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 192-128(%rsi),%ymm10,%ymm13 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq 224-128(%rsi),%ymm10,%ymm0 vpermq $0x93,%ymm9,%ymm9 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq 256-128(%rsi),%ymm10,%ymm12 vpbroadcastq 8(%r13),%ymm10 vpaddq %ymm12,%ymm8,%ymm8 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r9 movq %rdx,%rax imulq 8-128(%rcx),%rax addq %rax,%r10 movq 
%rdx,%rax imulq 16-128(%rcx),%rax addq %rax,%r11 shrq $29,%r9 imulq 24-128(%rcx),%rdx addq %rdx,%r12 addq %r9,%r10 vpmuludq 32-128(%rcx),%ymm11,%ymm13 vmovq %xmm10,%rbx vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 64-128(%rcx),%ymm11,%ymm0 vpaddq %ymm0,%ymm2,%ymm2 vpmuludq 96-128(%rcx),%ymm11,%ymm12 vpaddq %ymm12,%ymm3,%ymm3 vpmuludq 128-128(%rcx),%ymm11,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 160-128(%rcx),%ymm11,%ymm0 vpaddq %ymm0,%ymm5,%ymm5 vpmuludq 192-128(%rcx),%ymm11,%ymm12 vpaddq %ymm12,%ymm6,%ymm6 vpmuludq 224-128(%rcx),%ymm11,%ymm13 vpblendd $3,%ymm14,%ymm9,%ymm12 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 256-128(%rcx),%ymm11,%ymm0 vpaddq %ymm12,%ymm3,%ymm3 vpaddq %ymm0,%ymm8,%ymm8 movq %rbx,%rax imulq -128(%rsi),%rax addq %rax,%r10 vmovdqu -8+32-128(%rsi),%ymm12 movq %rbx,%rax imulq 8-128(%rsi),%rax addq %rax,%r11 vmovdqu -8+64-128(%rsi),%ymm13 movq %r10,%rax vpblendd $0xfc,%ymm14,%ymm9,%ymm9 imull %r8d,%eax vpaddq %ymm9,%ymm4,%ymm4 andl $0x1fffffff,%eax imulq 16-128(%rsi),%rbx addq %rbx,%r12 vpmuludq %ymm10,%ymm12,%ymm12 vmovd %eax,%xmm11 vmovdqu -8+96-128(%rsi),%ymm0 vpaddq %ymm12,%ymm1,%ymm1 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq %xmm11,%ymm11 vmovdqu -8+128-128(%rsi),%ymm12 vpaddq %ymm13,%ymm2,%ymm2 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -8+160-128(%rsi),%ymm13 vpaddq %ymm0,%ymm3,%ymm3 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -8+192-128(%rsi),%ymm0 vpaddq %ymm12,%ymm4,%ymm4 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -8+224-128(%rsi),%ymm12 vpaddq %ymm13,%ymm5,%ymm5 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -8+256-128(%rsi),%ymm13 vpaddq %ymm0,%ymm6,%ymm6 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -8+288-128(%rsi),%ymm9 vpaddq %ymm12,%ymm7,%ymm7 vpmuludq %ymm10,%ymm13,%ymm13 vpaddq %ymm13,%ymm8,%ymm8 vpmuludq %ymm10,%ymm9,%ymm9 vpbroadcastq 16(%r13),%ymm10 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r10 vmovdqu -8+32-128(%rcx),%ymm0 movq %rdx,%rax imulq 8-128(%rcx),%rax addq %rax,%r11 vmovdqu -8+64-128(%rcx),%ymm12 shrq $29,%r10 imulq 16-128(%rcx),%rdx addq %rdx,%r12 addq %r10,%r11 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -8+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -8+128-128(%rcx),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -8+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -8+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -8+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -8+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -8+288-128(%rcx),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm11,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm11,%ymm13,%ymm13 vpaddq %ymm13,%ymm9,%ymm9 vmovdqu -16+32-128(%rsi),%ymm0 movq %rbx,%rax imulq -128(%rsi),%rax addq %r11,%rax vmovdqu -16+64-128(%rsi),%ymm12 movq %rax,%r11 imull %r8d,%eax andl $0x1fffffff,%eax imulq 8-128(%rsi),%rbx addq %rbx,%r12 vpmuludq %ymm10,%ymm0,%ymm0 vmovd %eax,%xmm11 vmovdqu -16+96-128(%rsi),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm10,%ymm12,%ymm12 vpbroadcastq %xmm11,%ymm11 vmovdqu -16+128-128(%rsi),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -16+160-128(%rsi),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -16+192-128(%rsi),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -16+224-128(%rsi),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -16+256-128(%rsi),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu 
-16+288-128(%rsi),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm10,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq 24(%r13),%ymm10 vpaddq %ymm13,%ymm9,%ymm9 vmovdqu -16+32-128(%rcx),%ymm0 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r11 vmovdqu -16+64-128(%rcx),%ymm12 imulq 8-128(%rcx),%rdx addq %rdx,%r12 shrq $29,%r11 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -16+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -16+128-128(%rcx),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -16+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -16+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -16+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -16+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -16+288-128(%rcx),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -24+32-128(%rsi),%ymm0 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+64-128(%rsi),%ymm12 vpaddq %ymm13,%ymm9,%ymm9 addq %r11,%r12 imulq -128(%rsi),%rbx addq %rbx,%r12 movq %r12,%rax imull %r8d,%eax andl $0x1fffffff,%eax vpmuludq %ymm10,%ymm0,%ymm0 vmovd %eax,%xmm11 vmovdqu -24+96-128(%rsi),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm10,%ymm12,%ymm12 vpbroadcastq %xmm11,%ymm11 vmovdqu -24+128-128(%rsi),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -24+160-128(%rsi),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -24+192-128(%rsi),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -24+224-128(%rsi),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -24+256-128(%rsi),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -24+288-128(%rsi),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm10,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq 32(%r13),%ymm10 vpaddq %ymm13,%ymm9,%ymm9 addq $32,%r13 vmovdqu -24+32-128(%rcx),%ymm0 imulq -128(%rcx),%rax addq %rax,%r12 shrq $29,%r12 vmovdqu -24+64-128(%rcx),%ymm12 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -24+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm0 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu %ymm0,(%rsp) vpaddq %ymm12,%ymm2,%ymm1 vmovdqu -24+128-128(%rcx),%ymm0 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm2 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -24+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm3 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -24+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm4 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm5 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -24+288-128(%rcx),%ymm13 movq %r12,%r9 vpaddq %ymm0,%ymm7,%ymm6 vpmuludq %ymm11,%ymm12,%ymm12 addq (%rsp),%r9 vpaddq %ymm12,%ymm8,%ymm7 vpmuludq %ymm11,%ymm13,%ymm13 vmovq %r12,%xmm12 vpaddq %ymm13,%ymm9,%ymm8 decl %r14d jnz L$oop_mul_1024 vpaddq (%rsp),%ymm12,%ymm0 vpsrlq $29,%ymm0,%ymm12 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm13 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm10,%ymm10 vpblendd $3,%ymm12,%ymm13,%ymm12 vpermq $0x93,%ymm11,%ymm11 vpaddq %ymm9,%ymm0,%ymm0 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm1,%ymm1 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq 
%ymm13,%ymm2,%ymm2 vpblendd $3,%ymm11,%ymm14,%ymm11 vpaddq %ymm10,%ymm3,%ymm3 vpaddq %ymm11,%ymm4,%ymm4 vpsrlq $29,%ymm0,%ymm12 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm13 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm10,%ymm10 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm0,%ymm0 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm1,%ymm1 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm2,%ymm2 vpblendd $3,%ymm11,%ymm14,%ymm11 vpaddq %ymm10,%ymm3,%ymm3 vpaddq %ymm11,%ymm4,%ymm4 vmovdqu %ymm0,0-128(%rdi) vmovdqu %ymm1,32-128(%rdi) vmovdqu %ymm2,64-128(%rdi) vmovdqu %ymm3,96-128(%rdi) vpsrlq $29,%ymm4,%ymm12 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm13 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm10,%ymm10 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm4,%ymm4 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm5,%ymm5 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm6,%ymm6 vpblendd $3,%ymm11,%ymm0,%ymm11 vpaddq %ymm10,%ymm7,%ymm7 vpaddq %ymm11,%ymm8,%ymm8 vpsrlq $29,%ymm4,%ymm12 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm13 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm10,%ymm10 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm4,%ymm4 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm5,%ymm5 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm6,%ymm6 vpblendd $3,%ymm11,%ymm0,%ymm11 vpaddq %ymm10,%ymm7,%ymm7 vpaddq %ymm11,%ymm8,%ymm8 vmovdqu %ymm4,128-128(%rdi) vmovdqu %ymm5,160-128(%rdi) vmovdqu %ymm6,192-128(%rdi) vmovdqu %ymm7,224-128(%rdi) vmovdqu %ymm8,256-128(%rdi) vzeroupper movq %rbp,%rax movq -48(%rax),%r15 movq -40(%rax),%r14 movq -32(%rax),%r13 movq -24(%rax),%r12 movq -16(%rax),%rbp movq -8(%rax),%rbx leaq (%rax),%rsp L$mul_1024_epilogue: .byte 0xf3,0xc3 .globl _rsaz_1024_red2norm_avx2 .private_extern _rsaz_1024_red2norm_avx2 .p2align 5 _rsaz_1024_red2norm_avx2: _CET_ENDBR subq $-128,%rsi xorq %rax,%rax movq -128(%rsi),%r8 movq -120(%rsi),%r9 movq -112(%rsi),%r10 shlq $0,%r8 shlq $29,%r9 movq %r10,%r11 shlq $58,%r10 shrq $6,%r11 addq %r8,%rax addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,0(%rdi) movq %r11,%rax movq -104(%rsi),%r8 movq -96(%rsi),%r9 shlq $23,%r8 movq %r9,%r10 shlq $52,%r9 shrq $12,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,8(%rdi) movq %r10,%rax movq -88(%rsi),%r11 movq -80(%rsi),%r8 shlq $17,%r11 movq %r8,%r9 shlq $46,%r8 shrq $18,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,16(%rdi) movq %r9,%rax movq -72(%rsi),%r10 movq -64(%rsi),%r11 shlq $11,%r10 movq %r11,%r8 shlq $40,%r11 shrq $24,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,24(%rdi) movq %r8,%rax movq -56(%rsi),%r9 movq -48(%rsi),%r10 movq -40(%rsi),%r11 shlq $5,%r9 shlq $34,%r10 movq %r11,%r8 shlq $63,%r11 shrq $1,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq 
%rax,32(%rdi) movq %r8,%rax movq -32(%rsi),%r9 movq -24(%rsi),%r10 shlq $28,%r9 movq %r10,%r11 shlq $57,%r10 shrq $7,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,40(%rdi) movq %r11,%rax movq -16(%rsi),%r8 movq -8(%rsi),%r9 shlq $22,%r8 movq %r9,%r10 shlq $51,%r9 shrq $13,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,48(%rdi) movq %r10,%rax movq 0(%rsi),%r11 movq 8(%rsi),%r8 shlq $16,%r11 movq %r8,%r9 shlq $45,%r8 shrq $19,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,56(%rdi) movq %r9,%rax movq 16(%rsi),%r10 movq 24(%rsi),%r11 shlq $10,%r10 movq %r11,%r8 shlq $39,%r11 shrq $25,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,64(%rdi) movq %r8,%rax movq 32(%rsi),%r9 movq 40(%rsi),%r10 movq 48(%rsi),%r11 shlq $4,%r9 shlq $33,%r10 movq %r11,%r8 shlq $62,%r11 shrq $2,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,72(%rdi) movq %r8,%rax movq 56(%rsi),%r9 movq 64(%rsi),%r10 shlq $27,%r9 movq %r10,%r11 shlq $56,%r10 shrq $8,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,80(%rdi) movq %r11,%rax movq 72(%rsi),%r8 movq 80(%rsi),%r9 shlq $21,%r8 movq %r9,%r10 shlq $50,%r9 shrq $14,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,88(%rdi) movq %r10,%rax movq 88(%rsi),%r11 movq 96(%rsi),%r8 shlq $15,%r11 movq %r8,%r9 shlq $44,%r8 shrq $20,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,96(%rdi) movq %r9,%rax movq 104(%rsi),%r10 movq 112(%rsi),%r11 shlq $9,%r10 movq %r11,%r8 shlq $38,%r11 shrq $26,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,104(%rdi) movq %r8,%rax movq 120(%rsi),%r9 movq 128(%rsi),%r10 movq 136(%rsi),%r11 shlq $3,%r9 shlq $32,%r10 movq %r11,%r8 shlq $61,%r11 shrq $3,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,112(%rdi) movq %r8,%rax movq 144(%rsi),%r9 movq 152(%rsi),%r10 shlq $26,%r9 movq %r10,%r11 shlq $55,%r10 shrq $9,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,120(%rdi) movq %r11,%rax .byte 0xf3,0xc3 .globl _rsaz_1024_norm2red_avx2 .private_extern _rsaz_1024_norm2red_avx2 .p2align 5 _rsaz_1024_norm2red_avx2: _CET_ENDBR subq $-128,%rdi movq (%rsi),%r8 movl $0x1fffffff,%eax movq 8(%rsi),%r9 movq %r8,%r11 shrq $0,%r11 andq %rax,%r11 movq %r11,-128(%rdi) movq %r8,%r10 shrq $29,%r10 andq %rax,%r10 movq %r10,-120(%rdi) shrdq $58,%r9,%r8 andq %rax,%r8 movq %r8,-112(%rdi) movq 16(%rsi),%r10 movq %r9,%r8 shrq $23,%r8 andq %rax,%r8 movq %r8,-104(%rdi) shrdq $52,%r10,%r9 andq %rax,%r9 movq %r9,-96(%rdi) movq 24(%rsi),%r11 movq %r10,%r9 shrq $17,%r9 andq %rax,%r9 movq %r9,-88(%rdi) shrdq $46,%r11,%r10 andq %rax,%r10 movq %r10,-80(%rdi) movq 32(%rsi),%r8 movq %r11,%r10 shrq $11,%r10 andq %rax,%r10 movq %r10,-72(%rdi) shrdq $40,%r8,%r11 andq %rax,%r11 movq %r11,-64(%rdi) movq 40(%rsi),%r9 movq %r8,%r11 shrq $5,%r11 andq %rax,%r11 movq %r11,-56(%rdi) movq %r8,%r10 shrq $34,%r10 andq %rax,%r10 movq %r10,-48(%rdi) shrdq $63,%r9,%r8 andq %rax,%r8 movq %r8,-40(%rdi) movq 48(%rsi),%r10 movq %r9,%r8 shrq $28,%r8 andq %rax,%r8 movq %r8,-32(%rdi) shrdq $57,%r10,%r9 andq %rax,%r9 movq %r9,-24(%rdi) movq 56(%rsi),%r11 movq %r10,%r9 shrq $22,%r9 andq %rax,%r9 movq %r9,-16(%rdi) shrdq $51,%r11,%r10 andq %rax,%r10 movq %r10,-8(%rdi) movq 64(%rsi),%r8 movq %r11,%r10 shrq $16,%r10 andq %rax,%r10 movq %r10,0(%rdi) shrdq $45,%r8,%r11 andq %rax,%r11 movq %r11,8(%rdi) movq 72(%rsi),%r9 movq %r8,%r11 shrq $10,%r11 andq %rax,%r11 movq %r11,16(%rdi) shrdq $39,%r9,%r8 andq %rax,%r8 movq %r8,24(%rdi) movq 80(%rsi),%r10 movq %r9,%r8 shrq $4,%r8 andq %rax,%r8 movq %r8,32(%rdi) movq %r9,%r11 
shrq $33,%r11 andq %rax,%r11 movq %r11,40(%rdi) shrdq $62,%r10,%r9 andq %rax,%r9 movq %r9,48(%rdi) movq 88(%rsi),%r11 movq %r10,%r9 shrq $27,%r9 andq %rax,%r9 movq %r9,56(%rdi) shrdq $56,%r11,%r10 andq %rax,%r10 movq %r10,64(%rdi) movq 96(%rsi),%r8 movq %r11,%r10 shrq $21,%r10 andq %rax,%r10 movq %r10,72(%rdi) shrdq $50,%r8,%r11 andq %rax,%r11 movq %r11,80(%rdi) movq 104(%rsi),%r9 movq %r8,%r11 shrq $15,%r11 andq %rax,%r11 movq %r11,88(%rdi) shrdq $44,%r9,%r8 andq %rax,%r8 movq %r8,96(%rdi) movq 112(%rsi),%r10 movq %r9,%r8 shrq $9,%r8 andq %rax,%r8 movq %r8,104(%rdi) shrdq $38,%r10,%r9 andq %rax,%r9 movq %r9,112(%rdi) movq 120(%rsi),%r11 movq %r10,%r9 shrq $3,%r9 andq %rax,%r9 movq %r9,120(%rdi) movq %r10,%r8 shrq $32,%r8 andq %rax,%r8 movq %r8,128(%rdi) shrdq $61,%r11,%r10 andq %rax,%r10 movq %r10,136(%rdi) xorq %r8,%r8 movq %r11,%r10 shrq $26,%r10 andq %rax,%r10 movq %r10,144(%rdi) shrdq $55,%r8,%r11 andq %rax,%r11 movq %r11,152(%rdi) movq %r8,160(%rdi) movq %r8,168(%rdi) movq %r8,176(%rdi) movq %r8,184(%rdi) .byte 0xf3,0xc3 .globl _rsaz_1024_scatter5_avx2 .private_extern _rsaz_1024_scatter5_avx2 .p2align 5 _rsaz_1024_scatter5_avx2: _CET_ENDBR vzeroupper vmovdqu L$scatter_permd(%rip),%ymm5 shll $4,%edx leaq (%rdi,%rdx,1),%rdi movl $9,%eax jmp L$oop_scatter_1024 .p2align 5 L$oop_scatter_1024: vmovdqu (%rsi),%ymm0 leaq 32(%rsi),%rsi vpermd %ymm0,%ymm5,%ymm0 vmovdqu %xmm0,(%rdi) leaq 512(%rdi),%rdi decl %eax jnz L$oop_scatter_1024 vzeroupper .byte 0xf3,0xc3 .globl _rsaz_1024_gather5_avx2 .private_extern _rsaz_1024_gather5_avx2 .p2align 5 _rsaz_1024_gather5_avx2: _CET_ENDBR vzeroupper movq %rsp,%r11 leaq -256(%rsp),%rsp andq $-32,%rsp leaq L$inc(%rip),%r10 leaq -128(%rsp),%rax vmovd %edx,%xmm4 vmovdqa (%r10),%ymm0 vmovdqa 32(%r10),%ymm1 vmovdqa 64(%r10),%ymm5 vpbroadcastd %xmm4,%ymm4 vpaddd %ymm5,%ymm0,%ymm2 vpcmpeqd %ymm4,%ymm0,%ymm0 vpaddd %ymm5,%ymm1,%ymm3 vpcmpeqd %ymm4,%ymm1,%ymm1 vmovdqa %ymm0,0+128(%rax) vpaddd %ymm5,%ymm2,%ymm0 vpcmpeqd %ymm4,%ymm2,%ymm2 vmovdqa %ymm1,32+128(%rax) vpaddd %ymm5,%ymm3,%ymm1 vpcmpeqd %ymm4,%ymm3,%ymm3 vmovdqa %ymm2,64+128(%rax) vpaddd %ymm5,%ymm0,%ymm2 vpcmpeqd %ymm4,%ymm0,%ymm0 vmovdqa %ymm3,96+128(%rax) vpaddd %ymm5,%ymm1,%ymm3 vpcmpeqd %ymm4,%ymm1,%ymm1 vmovdqa %ymm0,128+128(%rax) vpaddd %ymm5,%ymm2,%ymm8 vpcmpeqd %ymm4,%ymm2,%ymm2 vmovdqa %ymm1,160+128(%rax) vpaddd %ymm5,%ymm3,%ymm9 vpcmpeqd %ymm4,%ymm3,%ymm3 vmovdqa %ymm2,192+128(%rax) vpaddd %ymm5,%ymm8,%ymm10 vpcmpeqd %ymm4,%ymm8,%ymm8 vmovdqa %ymm3,224+128(%rax) vpaddd %ymm5,%ymm9,%ymm11 vpcmpeqd %ymm4,%ymm9,%ymm9 vpaddd %ymm5,%ymm10,%ymm12 vpcmpeqd %ymm4,%ymm10,%ymm10 vpaddd %ymm5,%ymm11,%ymm13 vpcmpeqd %ymm4,%ymm11,%ymm11 vpaddd %ymm5,%ymm12,%ymm14 vpcmpeqd %ymm4,%ymm12,%ymm12 vpaddd %ymm5,%ymm13,%ymm15 vpcmpeqd %ymm4,%ymm13,%ymm13 vpcmpeqd %ymm4,%ymm14,%ymm14 vpcmpeqd %ymm4,%ymm15,%ymm15 vmovdqa -32(%r10),%ymm7 leaq 128(%rsi),%rsi movl $9,%edx L$oop_gather_1024: vmovdqa 0-128(%rsi),%ymm0 vmovdqa 32-128(%rsi),%ymm1 vmovdqa 64-128(%rsi),%ymm2 vmovdqa 96-128(%rsi),%ymm3 vpand 0+128(%rax),%ymm0,%ymm0 vpand 32+128(%rax),%ymm1,%ymm1 vpand 64+128(%rax),%ymm2,%ymm2 vpor %ymm0,%ymm1,%ymm4 vpand 96+128(%rax),%ymm3,%ymm3 vmovdqa 128-128(%rsi),%ymm0 vmovdqa 160-128(%rsi),%ymm1 vpor %ymm2,%ymm3,%ymm5 vmovdqa 192-128(%rsi),%ymm2 vmovdqa 224-128(%rsi),%ymm3 vpand 128+128(%rax),%ymm0,%ymm0 vpand 160+128(%rax),%ymm1,%ymm1 vpand 192+128(%rax),%ymm2,%ymm2 vpor %ymm0,%ymm4,%ymm4 vpand 224+128(%rax),%ymm3,%ymm3 vpand 256-128(%rsi),%ymm8,%ymm0 vpor %ymm1,%ymm5,%ymm5 vpand 288-128(%rsi),%ymm9,%ymm1 vpor 
%ymm2,%ymm4,%ymm4 vpand 320-128(%rsi),%ymm10,%ymm2 vpor %ymm3,%ymm5,%ymm5 vpand 352-128(%rsi),%ymm11,%ymm3 vpor %ymm0,%ymm4,%ymm4 vpand 384-128(%rsi),%ymm12,%ymm0 vpor %ymm1,%ymm5,%ymm5 vpand 416-128(%rsi),%ymm13,%ymm1 vpor %ymm2,%ymm4,%ymm4 vpand 448-128(%rsi),%ymm14,%ymm2 vpor %ymm3,%ymm5,%ymm5 vpand 480-128(%rsi),%ymm15,%ymm3 leaq 512(%rsi),%rsi vpor %ymm0,%ymm4,%ymm4 vpor %ymm1,%ymm5,%ymm5 vpor %ymm2,%ymm4,%ymm4 vpor %ymm3,%ymm5,%ymm5 vpor %ymm5,%ymm4,%ymm4 vextracti128 $1,%ymm4,%xmm5 vpor %xmm4,%xmm5,%xmm5 vpermd %ymm5,%ymm7,%ymm5 vmovdqu %ymm5,(%rdi) leaq 32(%rdi),%rdi decl %edx jnz L$oop_gather_1024 vpxor %ymm0,%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) vzeroupper leaq (%r11),%rsp .byte 0xf3,0xc3 L$SEH_end_rsaz_1024_gather5: .section __DATA,__const .p2align 6 L$and_mask: .quad 0x1fffffff,0x1fffffff,0x1fffffff,0x1fffffff L$scatter_permd: .long 0,2,4,6,7,7,7,7 L$gather_permd: .long 0,7,1,7,2,7,3,7 L$inc: .long 0,0,0,0, 1,1,1,1 .long 2,2,2,2, 3,3,3,3 .long 4,4,4,4, 4,4,4,4 .p2align 6 .text #endif
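The _rsaz_1024_norm2red_avx2 and _rsaz_1024_red2norm_avx2 routines above convert a 1024-bit operand between its packed form (sixteen 64-bit words) and a redundant radix-2^29 form (mask 0x1fffffff, see L$and_mask), which is what lets the AVX2 multiply loop accumulate 29x29-bit products in 64-bit lanes without carrying after every step. Below is a minimal C reference for the norm-to-red direction; the function name and the 40-slot output layout (36 live digits plus 4 zero slots, matching the -128..184 byte offsets written above) are inferred from reading the assembly, so treat this as a sketch rather than the library's API.

#include <stdint.h>

/* Reference model (not AWS-LC API): split a 1024-bit value held as 16
 * little-endian 64-bit words into 29-bit digits, one digit per 64-bit
 * slot, 36 live digits padded with zeros to 40 slots. */
static void norm2red_29(uint64_t red[40], const uint64_t n[16]) {
    const uint64_t mask = (1ULL << 29) - 1;          /* 0x1fffffff */
    for (int i = 0; i < 40; i++) {
        int bit  = 29 * i;                           /* first bit of digit i */
        int word = bit >> 6;
        int off  = bit & 63;
        uint64_t d = 0;
        if (word < 16) {
            d = n[word] >> off;
            if (off > 64 - 29 && word + 1 < 16)      /* digit straddles two words */
                d |= n[word + 1] << (64 - off);
        }
        red[i] = d & mask;                           /* digits 36..39 end up zero */
    }
}

The inverse direction is the same walk in reverse: shift each 29-bit digit back to its bit position, add, and carry into the next 64-bit word, which is the shl/shrd/add/adc pattern visible in _rsaz_1024_red2norm_avx2.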
marvin-hansen/iggy-streaming-system
30,594
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/rsaz-4k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl _rsaz_amm52x40_x1_ifma256 .private_extern _rsaz_amm52x40_x1_ifma256 .p2align 5 _rsaz_amm52x40_x1_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $10,%ebx .p2align 5 L$loop10: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 
96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 24(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 
vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 leaq 32(%r11),%r11 decl %ebx jne L$loop10 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm23 vpsrlq $52,%ymm7,%ymm24 vpsrlq $52,%ymm8,%ymm25 vpsrlq $52,%ymm9,%ymm26 vpsrlq $52,%ymm10,%ymm27 vpsrlq $52,%ymm11,%ymm28 vpsrlq $52,%ymm12,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm23,%ymm6,%ymm6 vpaddq %ymm24,%ymm7,%ymm7 vpaddq %ymm25,%ymm8,%ymm8 vpaddq %ymm26,%ymm9,%ymm9 vpaddq %ymm27,%ymm10,%ymm10 vpaddq %ymm28,%ymm11,%ymm11 vpaddq %ymm29,%ymm12,%ymm12 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq 
$6,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb %bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq L$mask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq L$mask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq L$mask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq L$mask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq L$mask52x4(%rip),%ymm9,%ymm9{%k7} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq L$mask52x4(%rip),%ymm10,%ymm10{%k1} vpsubq L$mask52x4(%rip),%ymm11,%ymm11{%k2} vpsubq L$mask52x4(%rip),%ymm12,%ymm12{%k3} vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) vzeroupper leaq (%rsp),%rax movq 0(%rax),%r15 movq 8(%rax),%r14 movq 16(%rax),%r13 movq 24(%rax),%r12 movq 32(%rax),%rbp movq 40(%rax),%rbx leaq 48(%rax),%rsp L$rsaz_amm52x40_x1_ifma256_epilogue: .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$mask52x4: .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl _rsaz_amm52x40_x2_ifma256 .private_extern _rsaz_amm52x40_x2_ifma256 .p2align 5 _rsaz_amm52x40_x2_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 vmovdqa64 %ymm0,%ymm13 vmovdqa64 %ymm0,%ymm14 vmovdqa64 %ymm0,%ymm15 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $40,%ebx .p2align 5 L$loop40: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 
andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 320(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 320(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 320(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq %r10,%r15 vpmadd52luq 320(%rsi),%ymm1,%ymm13 vpmadd52luq 352(%rsi),%ymm1,%ymm14 vpmadd52luq 384(%rsi),%ymm1,%ymm15 vpmadd52luq 416(%rsi),%ymm1,%ymm16 vpmadd52luq 448(%rsi),%ymm1,%ymm17 vpmadd52luq 480(%rsi),%ymm1,%ymm18 vpmadd52luq 512(%rsi),%ymm1,%ymm19 vpmadd52luq 544(%rsi),%ymm1,%ymm20 vpmadd52luq 576(%rsi),%ymm1,%ymm21 vpmadd52luq 608(%rsi),%ymm1,%ymm22 vpmadd52luq 320(%rcx),%ymm2,%ymm13 vpmadd52luq 352(%rcx),%ymm2,%ymm14 vpmadd52luq 384(%rcx),%ymm2,%ymm15 vpmadd52luq 416(%rcx),%ymm2,%ymm16 vpmadd52luq 448(%rcx),%ymm2,%ymm17 vpmadd52luq 480(%rcx),%ymm2,%ymm18 vpmadd52luq 512(%rcx),%ymm2,%ymm19 vpmadd52luq 544(%rcx),%ymm2,%ymm20 vpmadd52luq 576(%rcx),%ymm2,%ymm21 vpmadd52luq 608(%rcx),%ymm2,%ymm22 valignq $1,%ymm13,%ymm14,%ymm13 valignq $1,%ymm14,%ymm15,%ymm14 valignq $1,%ymm15,%ymm16,%ymm15 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm20,%ymm19 valignq $1,%ymm20,%ymm21,%ymm20 valignq $1,%ymm21,%ymm22,%ymm21 valignq $1,%ymm22,%ymm0,%ymm22 vmovq %xmm13,%r13 addq %r13,%r15 vpmadd52huq 320(%rsi),%ymm1,%ymm13 vpmadd52huq 352(%rsi),%ymm1,%ymm14 vpmadd52huq 384(%rsi),%ymm1,%ymm15 vpmadd52huq 416(%rsi),%ymm1,%ymm16 vpmadd52huq 448(%rsi),%ymm1,%ymm17 vpmadd52huq 480(%rsi),%ymm1,%ymm18 vpmadd52huq 512(%rsi),%ymm1,%ymm19 vpmadd52huq 544(%rsi),%ymm1,%ymm20 vpmadd52huq 576(%rsi),%ymm1,%ymm21 vpmadd52huq 608(%rsi),%ymm1,%ymm22 
vpmadd52huq 320(%rcx),%ymm2,%ymm13 vpmadd52huq 352(%rcx),%ymm2,%ymm14 vpmadd52huq 384(%rcx),%ymm2,%ymm15 vpmadd52huq 416(%rcx),%ymm2,%ymm16 vpmadd52huq 448(%rcx),%ymm2,%ymm17 vpmadd52huq 480(%rcx),%ymm2,%ymm18 vpmadd52huq 512(%rcx),%ymm2,%ymm19 vpmadd52huq 544(%rcx),%ymm2,%ymm20 vpmadd52huq 576(%rcx),%ymm2,%ymm21 vpmadd52huq 608(%rcx),%ymm2,%ymm22 leaq 8(%r11),%r11 decl %ebx jne L$loop40 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm23 vpsrlq $52,%ymm7,%ymm24 vpsrlq $52,%ymm8,%ymm25 vpsrlq $52,%ymm9,%ymm26 vpsrlq $52,%ymm10,%ymm27 vpsrlq $52,%ymm11,%ymm28 vpsrlq $52,%ymm12,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm23,%ymm6,%ymm6 vpaddq %ymm24,%ymm7,%ymm7 vpaddq %ymm25,%ymm8,%ymm8 vpaddq %ymm26,%ymm9,%ymm9 vpaddq %ymm27,%ymm10,%ymm10 vpaddq %ymm28,%ymm11,%ymm11 vpaddq %ymm29,%ymm12,%ymm12 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb %bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq L$mask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq L$mask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq 
L$mask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq L$mask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq L$mask52x4(%rip),%ymm9,%ymm9{%k7} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq L$mask52x4(%rip),%ymm10,%ymm10{%k1} vpsubq L$mask52x4(%rip),%ymm11,%ymm11{%k2} vpsubq L$mask52x4(%rip),%ymm12,%ymm12{%k3} vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm13,%ymm13 vpsrlq $52,%ymm13,%ymm0 vpsrlq $52,%ymm14,%ymm1 vpsrlq $52,%ymm15,%ymm2 vpsrlq $52,%ymm16,%ymm23 vpsrlq $52,%ymm17,%ymm24 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 vpsrlq $52,%ymm20,%ymm27 vpsrlq $52,%ymm21,%ymm28 vpsrlq $52,%ymm22,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm13,%ymm13 vpandq L$mask52x4(%rip),%ymm14,%ymm14 vpandq L$mask52x4(%rip),%ymm15,%ymm15 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 vpandq L$mask52x4(%rip),%ymm20,%ymm20 vpandq L$mask52x4(%rip),%ymm21,%ymm21 vpandq L$mask52x4(%rip),%ymm22,%ymm22 vpaddq %ymm0,%ymm13,%ymm13 vpaddq %ymm1,%ymm14,%ymm14 vpaddq %ymm2,%ymm15,%ymm15 vpaddq %ymm23,%ymm16,%ymm16 vpaddq %ymm24,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpaddq %ymm27,%ymm20,%ymm20 vpaddq %ymm28,%ymm21,%ymm21 vpaddq %ymm29,%ymm22,%ymm22 vpcmpuq $6,L$mask52x4(%rip),%ymm13,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm14,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm15,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm16,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm17,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm18,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm19,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm20,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,L$mask52x4(%rip),%ymm21,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm22,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq $0,L$mask52x4(%rip),%ymm13,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm14,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm15,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm16,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm17,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm18,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm19,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm20,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,L$mask52x4(%rip),%ymm21,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm22,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb 
%bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm13,%ymm13{%k1} vpsubq L$mask52x4(%rip),%ymm14,%ymm14{%k2} vpsubq L$mask52x4(%rip),%ymm15,%ymm15{%k3} vpsubq L$mask52x4(%rip),%ymm16,%ymm16{%k4} vpsubq L$mask52x4(%rip),%ymm17,%ymm17{%k5} vpsubq L$mask52x4(%rip),%ymm18,%ymm18{%k6} vpsubq L$mask52x4(%rip),%ymm19,%ymm19{%k7} vpandq L$mask52x4(%rip),%ymm13,%ymm13 vpandq L$mask52x4(%rip),%ymm14,%ymm14 vpandq L$mask52x4(%rip),%ymm15,%ymm15 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq L$mask52x4(%rip),%ymm20,%ymm20{%k1} vpsubq L$mask52x4(%rip),%ymm21,%ymm21{%k2} vpsubq L$mask52x4(%rip),%ymm22,%ymm22{%k3} vpandq L$mask52x4(%rip),%ymm20,%ymm20 vpandq L$mask52x4(%rip),%ymm21,%ymm21 vpandq L$mask52x4(%rip),%ymm22,%ymm22 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) vmovdqu64 %ymm13,320(%rdi) vmovdqu64 %ymm14,352(%rdi) vmovdqu64 %ymm15,384(%rdi) vmovdqu64 %ymm16,416(%rdi) vmovdqu64 %ymm17,448(%rdi) vmovdqu64 %ymm18,480(%rdi) vmovdqu64 %ymm19,512(%rdi) vmovdqu64 %ymm20,544(%rdi) vmovdqu64 %ymm21,576(%rdi) vmovdqu64 %ymm22,608(%rdi) vzeroupper leaq (%rsp),%rax movq 0(%rax),%r15 movq 8(%rax),%r14 movq 16(%rax),%r13 movq 24(%rax),%r12 movq 32(%rax),%rbp movq 40(%rax),%rbx leaq 48(%rax),%rsp L$rsaz_amm52x40_x2_ifma256_epilogue: .byte 0xf3,0xc3 .text .p2align 5 .globl _extract_multiplier_2x40_win5 .private_extern _extract_multiplier_2x40_win5 _extract_multiplier_2x40_win5: .byte 243,15,30,250 vmovdqa64 L$ones(%rip),%ymm24 vpbroadcastq %rdx,%ymm22 vpbroadcastq %rcx,%ymm23 leaq 20480(%rsi),%rax movq %rsi,%r10 vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vpxorq %ymm21,%ymm21,%ymm21 .p2align 5 L$loop_0: vpcmpq $0,%ymm21,%ymm22,%k1 vmovdqu64 0(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} vmovdqu64 64(%rsi),%ymm20 vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k1} vmovdqu64 192(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k1} vmovdqu64 224(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k1} vmovdqu64 256(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k1} vmovdqu64 288(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k1} vpaddq %ymm24,%ymm21,%ymm21 addq $640,%rsi cmpq %rsi,%rax jne L$loop_0 vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 %ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) movq %r10,%rsi vpxorq %ymm21,%ymm21,%ymm21 .p2align 5 L$loop_320: vpcmpq $0,%ymm21,%ymm23,%k1 vmovdqu64 320(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 352(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} vmovdqu64 384(%rsi),%ymm20 
vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 416(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 448(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 480(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k1} vmovdqu64 512(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k1} vmovdqu64 544(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k1} vmovdqu64 576(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k1} vmovdqu64 608(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k1} vpaddq %ymm24,%ymm21,%ymm21 addq $640,%rsi cmpq %rsi,%rax jne L$loop_320 vmovdqu64 %ymm0,320(%rdi) vmovdqu64 %ymm1,352(%rdi) vmovdqu64 %ymm2,384(%rdi) vmovdqu64 %ymm3,416(%rdi) vmovdqu64 %ymm4,448(%rdi) vmovdqu64 %ymm5,480(%rdi) vmovdqu64 %ymm16,512(%rdi) vmovdqu64 %ymm17,544(%rdi) vmovdqu64 %ymm18,576(%rdi) vmovdqu64 %ymm19,608(%rdi) .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$ones: .quad 1,1,1,1 L$zeros: .quad 0,0,0,0 .text #endif #endif
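The _rsaz_amm52x40_* routines above keep each 2048-bit operand as 40 digits of 52 bits, one digit per 64-bit lane (see L$mask52x4), and build the product with the AVX-512 IFMA instructions vpmadd52luq and vpmadd52huq; the valignq $1 chains then slide the redundant accumulator down by one digit after each 52-bit digit of the multiplier has been absorbed, the word-serial step of the almost-Montgomery multiplication. As a reading aid, here is a scalar C model of what a single lane of the two IFMA instructions contributes; the helper names are illustrative, not part of AWS-LC.

#include <stdint.h>

typedef unsigned __int128 u128;

/* One lane of VPMADD52LUQ: accumulate the low 52 bits of the 104-bit
 * product of the low 52 bits of a and b into a 64-bit accumulator. */
static inline uint64_t madd52lo(uint64_t acc, uint64_t a, uint64_t b) {
    u128 p = (u128)(a & ((1ULL << 52) - 1)) * (b & ((1ULL << 52) - 1));
    return acc + ((uint64_t)p & ((1ULL << 52) - 1));
}

/* One lane of VPMADD52HUQ: accumulate bits 52..103 of the same product. */
static inline uint64_t madd52hi(uint64_t acc, uint64_t a, uint64_t b) {
    u128 p = (u128)(a & ((1ULL << 52) - 1)) * (b & ((1ULL << 52) - 1));
    return acc + (uint64_t)(p >> 52);
}

Because every digit sits in a 64-bit lane with 12 spare bits, many such products can be accumulated before any carry has to move between lanes; the vpsrlq $52 / valignq $3 / vpandq / vpaddq block after the main loop is the deferred carry normalization.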
marvin-hansen/iggy-streaming-system
28,783
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/rsaz-3k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl _rsaz_amm52x30_x1_ifma256 .private_extern _rsaz_amm52x30_x1_ifma256 .p2align 5 _rsaz_amm52x30_x1_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $7,%ebx .p2align 5 L$loop7: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 
32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 24(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 
32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 leaq 32(%r11),%r11 decl %ebx jne L$loop7 movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpbroadcastq 
%r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm19 vpsrlq $52,%ymm7,%ymm20 vpsrlq $52,%ymm8,%ymm21 vpsrlq $52,%ymm9,%ymm22 vpsrlq $52,%ymm10,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm19,%ymm6,%ymm6 vpaddq %ymm20,%ymm7,%ymm7 vpaddq %ymm21,%ymm8,%ymm8 vpaddq %ymm22,%ymm9,%ymm9 vpaddq %ymm23,%ymm10,%ymm10 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq L$mask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq L$mask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq L$mask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq L$mask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq L$mask52x4(%rip),%ymm9,%ymm9{%k7} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 vpsubq L$mask52x4(%rip),%ymm10,%ymm10{%k1} vpandq L$mask52x4(%rip),%ymm10,%ymm10 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vzeroupper leaq (%rsp),%rax movq 0(%rax),%r15 movq 8(%rax),%r14 movq 16(%rax),%r13 movq 24(%rax),%r12 movq 32(%rax),%rbp movq 40(%rax),%rbx leaq 48(%rax),%rsp L$rsaz_amm52x30_x1_ifma256_epilogue: .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$mask52x4: .quad 0xfffffffffffff .quad 
0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl _rsaz_amm52x30_x2_ifma256 .private_extern _rsaz_amm52x30_x2_ifma256 .p2align 5 _rsaz_amm52x30_x2_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 vmovdqa64 %ymm0,%ymm13 vmovdqa64 %ymm0,%ymm14 vmovdqa64 %ymm0,%ymm15 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $30,%ebx .p2align 5 L$loop30: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 256(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 256(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 256(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq %r10,%r15 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 320(%rsi),%ymm1,%ymm13 vpmadd52luq 352(%rsi),%ymm1,%ymm14 vpmadd52luq 384(%rsi),%ymm1,%ymm15 vpmadd52luq 416(%rsi),%ymm1,%ymm16 vpmadd52luq 448(%rsi),%ymm1,%ymm17 vpmadd52luq 480(%rsi),%ymm1,%ymm18 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 vpmadd52luq 320(%rcx),%ymm2,%ymm13 vpmadd52luq 352(%rcx),%ymm2,%ymm14 vpmadd52luq 384(%rcx),%ymm2,%ymm15 vpmadd52luq 416(%rcx),%ymm2,%ymm16 vpmadd52luq 448(%rcx),%ymm2,%ymm17 vpmadd52luq 480(%rcx),%ymm2,%ymm18 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm13,%ymm12 valignq $1,%ymm13,%ymm14,%ymm13 valignq $1,%ymm14,%ymm15,%ymm14 valignq $1,%ymm15,%ymm16,%ymm15 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm0,%ymm18 vmovq %xmm11,%r13 
addq %r13,%r15 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 320(%rsi),%ymm1,%ymm13 vpmadd52huq 352(%rsi),%ymm1,%ymm14 vpmadd52huq 384(%rsi),%ymm1,%ymm15 vpmadd52huq 416(%rsi),%ymm1,%ymm16 vpmadd52huq 448(%rsi),%ymm1,%ymm17 vpmadd52huq 480(%rsi),%ymm1,%ymm18 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 vpmadd52huq 320(%rcx),%ymm2,%ymm13 vpmadd52huq 352(%rcx),%ymm2,%ymm14 vpmadd52huq 384(%rcx),%ymm2,%ymm15 vpmadd52huq 416(%rcx),%ymm2,%ymm16 vpmadd52huq 448(%rcx),%ymm2,%ymm17 vpmadd52huq 480(%rcx),%ymm2,%ymm18 leaq 8(%r11),%r11 decl %ebx jne L$loop30 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm19 vpsrlq $52,%ymm7,%ymm20 vpsrlq $52,%ymm8,%ymm21 vpsrlq $52,%ymm9,%ymm22 vpsrlq $52,%ymm10,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq L$mask52x4(%rip),%ymm9,%ymm9 vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm19,%ymm6,%ymm6 vpaddq %ymm20,%ymm7,%ymm7 vpaddq %ymm21,%ymm8,%ymm8 vpaddq %ymm22,%ymm9,%ymm9 vpaddq %ymm23,%ymm10,%ymm10 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm5,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm7,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm9,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq L$mask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq L$mask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq L$mask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq L$mask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq L$mask52x4(%rip),%ymm9,%ymm9{%k7} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm5,%ymm5 vpandq L$mask52x4(%rip),%ymm6,%ymm6 vpandq L$mask52x4(%rip),%ymm7,%ymm7 vpandq L$mask52x4(%rip),%ymm8,%ymm8 vpandq 
L$mask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 vpsubq L$mask52x4(%rip),%ymm10,%ymm10{%k1} vpandq L$mask52x4(%rip),%ymm10,%ymm10 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm11,%ymm11 vpsrlq $52,%ymm11,%ymm0 vpsrlq $52,%ymm12,%ymm1 vpsrlq $52,%ymm13,%ymm2 vpsrlq $52,%ymm14,%ymm19 vpsrlq $52,%ymm15,%ymm20 vpsrlq $52,%ymm16,%ymm21 vpsrlq $52,%ymm17,%ymm22 vpsrlq $52,%ymm18,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vpandq L$mask52x4(%rip),%ymm13,%ymm13 vpandq L$mask52x4(%rip),%ymm14,%ymm14 vpandq L$mask52x4(%rip),%ymm15,%ymm15 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpaddq %ymm0,%ymm11,%ymm11 vpaddq %ymm1,%ymm12,%ymm12 vpaddq %ymm2,%ymm13,%ymm13 vpaddq %ymm19,%ymm14,%ymm14 vpaddq %ymm20,%ymm15,%ymm15 vpaddq %ymm21,%ymm16,%ymm16 vpaddq %ymm22,%ymm17,%ymm17 vpaddq %ymm23,%ymm18,%ymm18 vpcmpuq $6,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,L$mask52x4(%rip),%ymm13,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm14,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,L$mask52x4(%rip),%ymm15,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm16,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,L$mask52x4(%rip),%ymm17,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm18,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,L$mask52x4(%rip),%ymm11,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm12,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,L$mask52x4(%rip),%ymm13,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm14,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,L$mask52x4(%rip),%ymm15,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm16,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,L$mask52x4(%rip),%ymm17,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm18,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq L$mask52x4(%rip),%ymm11,%ymm11{%k1} vpsubq L$mask52x4(%rip),%ymm12,%ymm12{%k2} vpsubq L$mask52x4(%rip),%ymm13,%ymm13{%k3} vpsubq L$mask52x4(%rip),%ymm14,%ymm14{%k4} vpsubq L$mask52x4(%rip),%ymm15,%ymm15{%k5} vpsubq L$mask52x4(%rip),%ymm16,%ymm16{%k6} vpsubq L$mask52x4(%rip),%ymm17,%ymm17{%k7} vpandq L$mask52x4(%rip),%ymm11,%ymm11 vpandq L$mask52x4(%rip),%ymm12,%ymm12 vpandq L$mask52x4(%rip),%ymm13,%ymm13 vpandq L$mask52x4(%rip),%ymm14,%ymm14 vpandq L$mask52x4(%rip),%ymm15,%ymm15 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 shrb $4,%r11b kmovb %r11d,%k1 vpsubq L$mask52x4(%rip),%ymm18,%ymm18{%k1} vpandq L$mask52x4(%rip),%ymm18,%ymm18 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) 
vmovdqu64 %ymm13,320(%rdi) vmovdqu64 %ymm14,352(%rdi) vmovdqu64 %ymm15,384(%rdi) vmovdqu64 %ymm16,416(%rdi) vmovdqu64 %ymm17,448(%rdi) vmovdqu64 %ymm18,480(%rdi) vzeroupper leaq (%rsp),%rax movq 0(%rax),%r15 movq 8(%rax),%r14 movq 16(%rax),%r13 movq 24(%rax),%r12 movq 32(%rax),%rbp movq 40(%rax),%rbx leaq 48(%rax),%rsp L$rsaz_amm52x30_x2_ifma256_epilogue: .byte 0xf3,0xc3 .text .p2align 5 .globl _extract_multiplier_2x30_win5 .private_extern _extract_multiplier_2x30_win5 _extract_multiplier_2x30_win5: .byte 243,15,30,250 vmovdqa64 L$ones(%rip),%ymm30 vpbroadcastq %rdx,%ymm28 vpbroadcastq %rcx,%ymm29 leaq 16384(%rsi),%rax vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm27 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 vmovdqa64 %ymm0,%ymm23 vmovdqa64 %ymm0,%ymm24 vmovdqa64 %ymm0,%ymm25 .p2align 5 L$loop: vpcmpq $0,%ymm27,%ymm28,%k1 vpcmpq $0,%ymm27,%ymm29,%k2 vmovdqu64 0(%rsi),%ymm26 vpblendmq %ymm26,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm26 vpblendmq %ymm26,%ymm1,%ymm1{%k1} vmovdqu64 64(%rsi),%ymm26 vpblendmq %ymm26,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm26 vpblendmq %ymm26,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm26 vpblendmq %ymm26,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm26 vpblendmq %ymm26,%ymm5,%ymm5{%k1} vmovdqu64 192(%rsi),%ymm26 vpblendmq %ymm26,%ymm16,%ymm16{%k1} vmovdqu64 224(%rsi),%ymm26 vpblendmq %ymm26,%ymm17,%ymm17{%k1} vmovdqu64 256(%rsi),%ymm26 vpblendmq %ymm26,%ymm18,%ymm18{%k2} vmovdqu64 288(%rsi),%ymm26 vpblendmq %ymm26,%ymm19,%ymm19{%k2} vmovdqu64 320(%rsi),%ymm26 vpblendmq %ymm26,%ymm20,%ymm20{%k2} vmovdqu64 352(%rsi),%ymm26 vpblendmq %ymm26,%ymm21,%ymm21{%k2} vmovdqu64 384(%rsi),%ymm26 vpblendmq %ymm26,%ymm22,%ymm22{%k2} vmovdqu64 416(%rsi),%ymm26 vpblendmq %ymm26,%ymm23,%ymm23{%k2} vmovdqu64 448(%rsi),%ymm26 vpblendmq %ymm26,%ymm24,%ymm24{%k2} vmovdqu64 480(%rsi),%ymm26 vpblendmq %ymm26,%ymm25,%ymm25{%k2} vpaddq %ymm30,%ymm27,%ymm27 addq $512,%rsi cmpq %rsi,%rax jne L$loop vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 %ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) vmovdqu64 %ymm20,320(%rdi) vmovdqu64 %ymm21,352(%rdi) vmovdqu64 %ymm22,384(%rdi) vmovdqu64 %ymm23,416(%rdi) vmovdqu64 %ymm24,448(%rdi) vmovdqu64 %ymm25,480(%rdi) .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$ones: .quad 1,1,1,1 L$zeros: .quad 0,0,0,0 .text #endif #endif
marvin-hansen/iggy-streaming-system
47,728
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/sha512-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _sha512_block_data_order_nohw .private_extern _sha512_block_data_order_nohw .p2align 4 _sha512_block_data_order_nohw: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 shlq $4,%rdx subq $128+32,%rsp leaq (%rsi,%rdx,8),%rdx andq $-64,%rsp movq %rdi,128+0(%rsp) movq %rsi,128+8(%rsp) movq %rdx,128+16(%rsp) movq %rax,152(%rsp) L$prologue: movq 0(%rdi),%rax movq 8(%rdi),%rbx movq 16(%rdi),%rcx movq 24(%rdi),%rdx movq 32(%rdi),%r8 movq 40(%rdi),%r9 movq 48(%rdi),%r10 movq 56(%rdi),%r11 jmp L$loop .p2align 4 L$loop: movq %rbx,%rdi leaq K512(%rip),%rbp xorq %rcx,%rdi movq 0(%rsi),%r12 movq %r8,%r13 movq %rax,%r14 bswapq %r12 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,0(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp addq %r14,%r11 movq 8(%rsi),%r12 movq %rdx,%r13 movq %r11,%r14 bswapq %r12 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,8(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp addq %r14,%r10 movq 16(%rsi),%r12 movq %rcx,%r13 movq %r10,%r14 bswapq %r12 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,16(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp addq %r14,%r9 movq 24(%rsi),%r12 movq %rbx,%r13 movq %r9,%r14 bswapq %r12 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,24(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp addq %r14,%r8 movq 32(%rsi),%r12 movq %rax,%r13 movq %r8,%r14 bswapq %r12 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,32(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp addq %r14,%rdx movq 40(%rsi),%r12 movq %r11,%r13 movq %rdx,%r14 bswapq %r12 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,40(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq 
$14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp addq %r14,%rcx movq 48(%rsi),%r12 movq %r10,%r13 movq %rcx,%r14 bswapq %r12 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,48(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp addq %r14,%rbx movq 56(%rsi),%r12 movq %r9,%r13 movq %rbx,%r14 bswapq %r12 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,56(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp addq %r14,%rax movq 64(%rsi),%r12 movq %r8,%r13 movq %rax,%r14 bswapq %r12 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,64(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp addq %r14,%r11 movq 72(%rsi),%r12 movq %rdx,%r13 movq %r11,%r14 bswapq %r12 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,72(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp addq %r14,%r10 movq 80(%rsi),%r12 movq %rcx,%r13 movq %r10,%r14 bswapq %r12 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,80(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp addq %r14,%r9 movq 88(%rsi),%r12 movq %rbx,%r13 movq %r9,%r14 bswapq %r12 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,88(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp addq %r14,%r8 movq 96(%rsi),%r12 movq %rax,%r13 movq %r8,%r14 bswapq %r12 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,96(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp addq %r14,%rdx movq 104(%rsi),%r12 movq %r11,%r13 movq %rdx,%r14 bswapq %r12 rorq 
$23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,104(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp addq %r14,%rcx movq 112(%rsi),%r12 movq %r10,%r13 movq %rcx,%r14 bswapq %r12 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,112(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp addq %r14,%rbx movq 120(%rsi),%r12 movq %r9,%r13 movq %rbx,%r14 bswapq %r12 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,120(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp jmp L$rounds_16_xx .p2align 4 L$rounds_16_xx: movq 8(%rsp),%r13 movq 112(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rax movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 72(%rsp),%r12 addq 0(%rsp),%r12 movq %r8,%r13 addq %r15,%r12 movq %rax,%r14 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,0(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp movq 16(%rsp),%r13 movq 120(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r11 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 80(%rsp),%r12 addq 8(%rsp),%r12 movq %rdx,%r13 addq %rdi,%r12 movq %r11,%r14 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,8(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp movq 24(%rsp),%r13 movq 0(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r10 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 88(%rsp),%r12 addq 16(%rsp),%r12 movq %rcx,%r13 addq %r15,%r12 movq %r10,%r14 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,16(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp movq 32(%rsp),%r13 movq 
8(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r9 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 96(%rsp),%r12 addq 24(%rsp),%r12 movq %rbx,%r13 addq %rdi,%r12 movq %r9,%r14 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,24(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp movq 40(%rsp),%r13 movq 16(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r8 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 104(%rsp),%r12 addq 32(%rsp),%r12 movq %rax,%r13 addq %r15,%r12 movq %r8,%r14 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,32(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp movq 48(%rsp),%r13 movq 24(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rdx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 112(%rsp),%r12 addq 40(%rsp),%r12 movq %r11,%r13 addq %rdi,%r12 movq %rdx,%r14 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,40(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp movq 56(%rsp),%r13 movq 32(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rcx movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 120(%rsp),%r12 addq 48(%rsp),%r12 movq %r10,%r13 addq %r15,%r12 movq %rcx,%r14 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,48(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp movq 64(%rsp),%r13 movq 40(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rbx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 0(%rsp),%r12 addq 56(%rsp),%r12 movq %r9,%r13 addq %rdi,%r12 movq %rbx,%r14 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,56(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp movq 72(%rsp),%r13 movq 48(%rsp),%r15 movq 
%r13,%r12 rorq $7,%r13 addq %r14,%rax movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 8(%rsp),%r12 addq 64(%rsp),%r12 movq %r8,%r13 addq %r15,%r12 movq %rax,%r14 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,64(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp movq 80(%rsp),%r13 movq 56(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r11 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 16(%rsp),%r12 addq 72(%rsp),%r12 movq %rdx,%r13 addq %rdi,%r12 movq %r11,%r14 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,72(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp movq 88(%rsp),%r13 movq 64(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r10 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 24(%rsp),%r12 addq 80(%rsp),%r12 movq %rcx,%r13 addq %r15,%r12 movq %r10,%r14 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,80(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp movq 96(%rsp),%r13 movq 72(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r9 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 32(%rsp),%r12 addq 88(%rsp),%r12 movq %rbx,%r13 addq %rdi,%r12 movq %r9,%r14 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,88(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp movq 104(%rsp),%r13 movq 80(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r8 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 40(%rsp),%r12 addq 96(%rsp),%r12 movq %rax,%r13 addq %r15,%r12 movq %r8,%r14 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,96(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp movq 112(%rsp),%r13 movq 88(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 
addq %r14,%rdx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 48(%rsp),%r12 addq 104(%rsp),%r12 movq %r11,%r13 addq %rdi,%r12 movq %rdx,%r14 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,104(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp movq 120(%rsp),%r13 movq 96(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rcx movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 56(%rsp),%r12 addq 112(%rsp),%r12 movq %r10,%r13 addq %r15,%r12 movq %rcx,%r14 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,112(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp movq 0(%rsp),%r13 movq 104(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rbx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 64(%rsp),%r12 addq 120(%rsp),%r12 movq %r9,%r13 addq %rdi,%r12 movq %rbx,%r14 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,120(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp cmpb $0,7(%rbp) jnz L$rounds_16_xx movq 128+0(%rsp),%rdi addq %r14,%rax leaq 128(%rsi),%rsi addq 0(%rdi),%rax addq 8(%rdi),%rbx addq 16(%rdi),%rcx addq 24(%rdi),%rdx addq 32(%rdi),%r8 addq 40(%rdi),%r9 addq 48(%rdi),%r10 addq 56(%rdi),%r11 cmpq 128+16(%rsp),%rsi movq %rax,0(%rdi) movq %rbx,8(%rdi) movq %rcx,16(%rdi) movq %rdx,24(%rdi) movq %r8,32(%rdi) movq %r9,40(%rdi) movq %r10,48(%rdi) movq %r11,56(%rdi) jb L$loop movq 152(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 K512: .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 
.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0x0001020304050607,0x08090a0b0c0d0e0f .quad 0x0001020304050607,0x08090a0b0c0d0e0f .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl _sha512_block_data_order_avx .private_extern _sha512_block_data_order_avx .p2align 6 _sha512_block_data_order_avx: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 shlq $4,%rdx subq $160,%rsp leaq (%rsi,%rdx,8),%rdx andq $-64,%rsp movq %rdi,128+0(%rsp) movq %rsi,128+8(%rsp) movq %rdx,128+16(%rsp) movq %rax,152(%rsp) L$prologue_avx: vzeroupper movq 0(%rdi),%rax movq 8(%rdi),%rbx movq 16(%rdi),%rcx movq 24(%rdi),%rdx movq 32(%rdi),%r8 movq 40(%rdi),%r9 movq 48(%rdi),%r10 movq 
56(%rdi),%r11 jmp L$loop_avx .p2align 4 L$loop_avx: vmovdqa K512+1280(%rip),%xmm11 vmovdqu 0(%rsi),%xmm0 leaq K512+128(%rip),%rbp vmovdqu 16(%rsi),%xmm1 vmovdqu 32(%rsi),%xmm2 vpshufb %xmm11,%xmm0,%xmm0 vmovdqu 48(%rsi),%xmm3 vpshufb %xmm11,%xmm1,%xmm1 vmovdqu 64(%rsi),%xmm4 vpshufb %xmm11,%xmm2,%xmm2 vmovdqu 80(%rsi),%xmm5 vpshufb %xmm11,%xmm3,%xmm3 vmovdqu 96(%rsi),%xmm6 vpshufb %xmm11,%xmm4,%xmm4 vmovdqu 112(%rsi),%xmm7 vpshufb %xmm11,%xmm5,%xmm5 vpaddq -128(%rbp),%xmm0,%xmm8 vpshufb %xmm11,%xmm6,%xmm6 vpaddq -96(%rbp),%xmm1,%xmm9 vpshufb %xmm11,%xmm7,%xmm7 vpaddq -64(%rbp),%xmm2,%xmm10 vpaddq -32(%rbp),%xmm3,%xmm11 vmovdqa %xmm8,0(%rsp) vpaddq 0(%rbp),%xmm4,%xmm8 vmovdqa %xmm9,16(%rsp) vpaddq 32(%rbp),%xmm5,%xmm9 vmovdqa %xmm10,32(%rsp) vpaddq 64(%rbp),%xmm6,%xmm10 vmovdqa %xmm11,48(%rsp) vpaddq 96(%rbp),%xmm7,%xmm11 vmovdqa %xmm8,64(%rsp) movq %rax,%r14 vmovdqa %xmm9,80(%rsp) movq %rbx,%rdi vmovdqa %xmm10,96(%rsp) xorq %rcx,%rdi vmovdqa %xmm11,112(%rsp) movq %r8,%r13 jmp L$avx_00_47 .p2align 4 L$avx_00_47: addq $256,%rbp vpalignr $8,%xmm0,%xmm1,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rax vpalignr $8,%xmm4,%xmm5,%xmm11 movq %r9,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r8,%r13 xorq %r10,%r12 vpaddq %xmm11,%xmm0,%xmm0 shrdq $4,%r13,%r13 xorq %rax,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r8,%r12 xorq %r8,%r13 vpsllq $56,%xmm8,%xmm9 addq 0(%rsp),%r11 movq %rax,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r10,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rbx,%r15 addq %r12,%r11 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rax,%r14 addq %r13,%r11 vpxor %xmm10,%xmm8,%xmm8 xorq %rbx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm7,%xmm11 addq %r11,%rdx addq %rdi,%r11 vpxor %xmm9,%xmm8,%xmm8 movq %rdx,%r13 addq %r11,%r14 vpsllq $3,%xmm7,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r11 vpaddq %xmm8,%xmm0,%xmm0 movq %r8,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm7,%xmm9 xorq %rdx,%r13 xorq %r9,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r11,%r14 vpsllq $42,%xmm10,%xmm10 andq %rdx,%r12 xorq %rdx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 8(%rsp),%r10 movq %r11,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r9,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rax,%rdi addq %r12,%r10 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm0,%xmm0 xorq %r11,%r14 addq %r13,%r10 vpaddq -128(%rbp),%xmm0,%xmm10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 vmovdqa %xmm10,0(%rsp) vpalignr $8,%xmm1,%xmm2,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r10 vpalignr $8,%xmm5,%xmm6,%xmm11 movq %rdx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rcx,%r13 xorq %r8,%r12 vpaddq %xmm11,%xmm1,%xmm1 shrdq $4,%r13,%r13 xorq %r10,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rcx,%r12 xorq %rcx,%r13 vpsllq $56,%xmm8,%xmm9 addq 16(%rsp),%r9 movq %r10,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r8,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r11,%r15 addq %r12,%r9 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r10,%r14 addq %r13,%r9 vpxor %xmm10,%xmm8,%xmm8 xorq %r11,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm0,%xmm11 addq %r9,%rbx addq %rdi,%r9 vpxor %xmm9,%xmm8,%xmm8 movq %rbx,%r13 addq %r9,%r14 vpsllq $3,%xmm0,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r9 vpaddq %xmm8,%xmm1,%xmm1 movq %rcx,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm0,%xmm9 xorq %rbx,%r13 xorq %rdx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r9,%r14 vpsllq $42,%xmm10,%xmm10 andq %rbx,%r12 xorq %rbx,%r13 vpxor %xmm9,%xmm11,%xmm11 
addq 24(%rsp),%r8 movq %r9,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rdx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r10,%rdi addq %r12,%r8 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm1,%xmm1 xorq %r9,%r14 addq %r13,%r8 vpaddq -96(%rbp),%xmm1,%xmm10 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 vmovdqa %xmm10,16(%rsp) vpalignr $8,%xmm2,%xmm3,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r8 vpalignr $8,%xmm6,%xmm7,%xmm11 movq %rbx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rax,%r13 xorq %rcx,%r12 vpaddq %xmm11,%xmm2,%xmm2 shrdq $4,%r13,%r13 xorq %r8,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rax,%r12 xorq %rax,%r13 vpsllq $56,%xmm8,%xmm9 addq 32(%rsp),%rdx movq %r8,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rcx,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r9,%r15 addq %r12,%rdx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r8,%r14 addq %r13,%rdx vpxor %xmm10,%xmm8,%xmm8 xorq %r9,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm1,%xmm11 addq %rdx,%r11 addq %rdi,%rdx vpxor %xmm9,%xmm8,%xmm8 movq %r11,%r13 addq %rdx,%r14 vpsllq $3,%xmm1,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rdx vpaddq %xmm8,%xmm2,%xmm2 movq %rax,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm1,%xmm9 xorq %r11,%r13 xorq %rbx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rdx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r11,%r12 xorq %r11,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 40(%rsp),%rcx movq %rdx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rbx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r8,%rdi addq %r12,%rcx vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm2,%xmm2 xorq %rdx,%r14 addq %r13,%rcx vpaddq -64(%rbp),%xmm2,%xmm10 xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 vmovdqa %xmm10,32(%rsp) vpalignr $8,%xmm3,%xmm4,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rcx vpalignr $8,%xmm7,%xmm0,%xmm11 movq %r11,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r10,%r13 xorq %rax,%r12 vpaddq %xmm11,%xmm3,%xmm3 shrdq $4,%r13,%r13 xorq %rcx,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r10,%r12 xorq %r10,%r13 vpsllq $56,%xmm8,%xmm9 addq 48(%rsp),%rbx movq %rcx,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rax,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rdx,%r15 addq %r12,%rbx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rcx,%r14 addq %r13,%rbx vpxor %xmm10,%xmm8,%xmm8 xorq %rdx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm2,%xmm11 addq %rbx,%r9 addq %rdi,%rbx vpxor %xmm9,%xmm8,%xmm8 movq %r9,%r13 addq %rbx,%r14 vpsllq $3,%xmm2,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rbx vpaddq %xmm8,%xmm3,%xmm3 movq %r10,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm2,%xmm9 xorq %r9,%r13 xorq %r11,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rbx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r9,%r12 xorq %r9,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 56(%rsp),%rax movq %rbx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r11,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rcx,%rdi addq %r12,%rax vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm3,%xmm3 xorq %rbx,%r14 addq %r13,%rax vpaddq -32(%rbp),%xmm3,%xmm10 xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 vmovdqa %xmm10,48(%rsp) vpalignr $8,%xmm4,%xmm5,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rax vpalignr $8,%xmm0,%xmm1,%xmm11 movq %r9,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r8,%r13 xorq %r10,%r12 vpaddq %xmm11,%xmm4,%xmm4 shrdq 
$4,%r13,%r13 xorq %rax,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r8,%r12 xorq %r8,%r13 vpsllq $56,%xmm8,%xmm9 addq 64(%rsp),%r11 movq %rax,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r10,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rbx,%r15 addq %r12,%r11 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rax,%r14 addq %r13,%r11 vpxor %xmm10,%xmm8,%xmm8 xorq %rbx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm3,%xmm11 addq %r11,%rdx addq %rdi,%r11 vpxor %xmm9,%xmm8,%xmm8 movq %rdx,%r13 addq %r11,%r14 vpsllq $3,%xmm3,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r11 vpaddq %xmm8,%xmm4,%xmm4 movq %r8,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm3,%xmm9 xorq %rdx,%r13 xorq %r9,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r11,%r14 vpsllq $42,%xmm10,%xmm10 andq %rdx,%r12 xorq %rdx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 72(%rsp),%r10 movq %r11,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r9,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rax,%rdi addq %r12,%r10 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm4,%xmm4 xorq %r11,%r14 addq %r13,%r10 vpaddq 0(%rbp),%xmm4,%xmm10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 vmovdqa %xmm10,64(%rsp) vpalignr $8,%xmm5,%xmm6,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r10 vpalignr $8,%xmm1,%xmm2,%xmm11 movq %rdx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rcx,%r13 xorq %r8,%r12 vpaddq %xmm11,%xmm5,%xmm5 shrdq $4,%r13,%r13 xorq %r10,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rcx,%r12 xorq %rcx,%r13 vpsllq $56,%xmm8,%xmm9 addq 80(%rsp),%r9 movq %r10,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r8,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r11,%r15 addq %r12,%r9 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r10,%r14 addq %r13,%r9 vpxor %xmm10,%xmm8,%xmm8 xorq %r11,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm4,%xmm11 addq %r9,%rbx addq %rdi,%r9 vpxor %xmm9,%xmm8,%xmm8 movq %rbx,%r13 addq %r9,%r14 vpsllq $3,%xmm4,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r9 vpaddq %xmm8,%xmm5,%xmm5 movq %rcx,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm4,%xmm9 xorq %rbx,%r13 xorq %rdx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r9,%r14 vpsllq $42,%xmm10,%xmm10 andq %rbx,%r12 xorq %rbx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 88(%rsp),%r8 movq %r9,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rdx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r10,%rdi addq %r12,%r8 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm5,%xmm5 xorq %r9,%r14 addq %r13,%r8 vpaddq 32(%rbp),%xmm5,%xmm10 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 vmovdqa %xmm10,80(%rsp) vpalignr $8,%xmm6,%xmm7,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r8 vpalignr $8,%xmm2,%xmm3,%xmm11 movq %rbx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rax,%r13 xorq %rcx,%r12 vpaddq %xmm11,%xmm6,%xmm6 shrdq $4,%r13,%r13 xorq %r8,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rax,%r12 xorq %rax,%r13 vpsllq $56,%xmm8,%xmm9 addq 96(%rsp),%rdx movq %r8,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rcx,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r9,%r15 addq %r12,%rdx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r8,%r14 addq %r13,%rdx vpxor %xmm10,%xmm8,%xmm8 xorq %r9,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm5,%xmm11 addq %rdx,%r11 addq %rdi,%rdx vpxor %xmm9,%xmm8,%xmm8 movq %r11,%r13 addq %rdx,%r14 vpsllq $3,%xmm5,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rdx vpaddq %xmm8,%xmm6,%xmm6 movq %rax,%r12 shrdq 
$5,%r14,%r14 vpsrlq $19,%xmm5,%xmm9 xorq %r11,%r13 xorq %rbx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rdx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r11,%r12 xorq %r11,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 104(%rsp),%rcx movq %rdx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rbx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r8,%rdi addq %r12,%rcx vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm6,%xmm6 xorq %rdx,%r14 addq %r13,%rcx vpaddq 64(%rbp),%xmm6,%xmm10 xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 vmovdqa %xmm10,96(%rsp) vpalignr $8,%xmm7,%xmm0,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rcx vpalignr $8,%xmm3,%xmm4,%xmm11 movq %r11,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r10,%r13 xorq %rax,%r12 vpaddq %xmm11,%xmm7,%xmm7 shrdq $4,%r13,%r13 xorq %rcx,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r10,%r12 xorq %r10,%r13 vpsllq $56,%xmm8,%xmm9 addq 112(%rsp),%rbx movq %rcx,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rax,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rdx,%r15 addq %r12,%rbx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rcx,%r14 addq %r13,%rbx vpxor %xmm10,%xmm8,%xmm8 xorq %rdx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm6,%xmm11 addq %rbx,%r9 addq %rdi,%rbx vpxor %xmm9,%xmm8,%xmm8 movq %r9,%r13 addq %rbx,%r14 vpsllq $3,%xmm6,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rbx vpaddq %xmm8,%xmm7,%xmm7 movq %r10,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm6,%xmm9 xorq %r9,%r13 xorq %r11,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rbx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r9,%r12 xorq %r9,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 120(%rsp),%rax movq %rbx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r11,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rcx,%rdi addq %r12,%rax vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm7,%xmm7 xorq %rbx,%r14 addq %r13,%rax vpaddq 96(%rbp),%xmm7,%xmm10 xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 vmovdqa %xmm10,112(%rsp) cmpb $0,135(%rbp) jne L$avx_00_47 shrdq $23,%r13,%r13 movq %r14,%rax movq %r9,%r12 shrdq $5,%r14,%r14 xorq %r8,%r13 xorq %r10,%r12 shrdq $4,%r13,%r13 xorq %rax,%r14 andq %r8,%r12 xorq %r8,%r13 addq 0(%rsp),%r11 movq %rax,%r15 xorq %r10,%r12 shrdq $6,%r14,%r14 xorq %rbx,%r15 addq %r12,%r11 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rax,%r14 addq %r13,%r11 xorq %rbx,%rdi shrdq $28,%r14,%r14 addq %r11,%rdx addq %rdi,%r11 movq %rdx,%r13 addq %r11,%r14 shrdq $23,%r13,%r13 movq %r14,%r11 movq %r8,%r12 shrdq $5,%r14,%r14 xorq %rdx,%r13 xorq %r9,%r12 shrdq $4,%r13,%r13 xorq %r11,%r14 andq %rdx,%r12 xorq %rdx,%r13 addq 8(%rsp),%r10 movq %r11,%rdi xorq %r9,%r12 shrdq $6,%r14,%r14 xorq %rax,%rdi addq %r12,%r10 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r11,%r14 addq %r13,%r10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 shrdq $23,%r13,%r13 movq %r14,%r10 movq %rdx,%r12 shrdq $5,%r14,%r14 xorq %rcx,%r13 xorq %r8,%r12 shrdq $4,%r13,%r13 xorq %r10,%r14 andq %rcx,%r12 xorq %rcx,%r13 addq 16(%rsp),%r9 movq %r10,%r15 xorq %r8,%r12 shrdq $6,%r14,%r14 xorq %r11,%r15 addq %r12,%r9 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r10,%r14 addq %r13,%r9 xorq %r11,%rdi shrdq $28,%r14,%r14 addq %r9,%rbx addq %rdi,%r9 movq %rbx,%r13 addq %r9,%r14 shrdq $23,%r13,%r13 movq %r14,%r9 movq %rcx,%r12 shrdq $5,%r14,%r14 xorq %rbx,%r13 xorq %rdx,%r12 shrdq $4,%r13,%r13 xorq %r9,%r14 andq %rbx,%r12 xorq %rbx,%r13 addq 24(%rsp),%r8 movq 
%r9,%rdi xorq %rdx,%r12 shrdq $6,%r14,%r14 xorq %r10,%rdi addq %r12,%r8 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r9,%r14 addq %r13,%r8 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 shrdq $23,%r13,%r13 movq %r14,%r8 movq %rbx,%r12 shrdq $5,%r14,%r14 xorq %rax,%r13 xorq %rcx,%r12 shrdq $4,%r13,%r13 xorq %r8,%r14 andq %rax,%r12 xorq %rax,%r13 addq 32(%rsp),%rdx movq %r8,%r15 xorq %rcx,%r12 shrdq $6,%r14,%r14 xorq %r9,%r15 addq %r12,%rdx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r8,%r14 addq %r13,%rdx xorq %r9,%rdi shrdq $28,%r14,%r14 addq %rdx,%r11 addq %rdi,%rdx movq %r11,%r13 addq %rdx,%r14 shrdq $23,%r13,%r13 movq %r14,%rdx movq %rax,%r12 shrdq $5,%r14,%r14 xorq %r11,%r13 xorq %rbx,%r12 shrdq $4,%r13,%r13 xorq %rdx,%r14 andq %r11,%r12 xorq %r11,%r13 addq 40(%rsp),%rcx movq %rdx,%rdi xorq %rbx,%r12 shrdq $6,%r14,%r14 xorq %r8,%rdi addq %r12,%rcx shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rdx,%r14 addq %r13,%rcx xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 shrdq $23,%r13,%r13 movq %r14,%rcx movq %r11,%r12 shrdq $5,%r14,%r14 xorq %r10,%r13 xorq %rax,%r12 shrdq $4,%r13,%r13 xorq %rcx,%r14 andq %r10,%r12 xorq %r10,%r13 addq 48(%rsp),%rbx movq %rcx,%r15 xorq %rax,%r12 shrdq $6,%r14,%r14 xorq %rdx,%r15 addq %r12,%rbx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rcx,%r14 addq %r13,%rbx xorq %rdx,%rdi shrdq $28,%r14,%r14 addq %rbx,%r9 addq %rdi,%rbx movq %r9,%r13 addq %rbx,%r14 shrdq $23,%r13,%r13 movq %r14,%rbx movq %r10,%r12 shrdq $5,%r14,%r14 xorq %r9,%r13 xorq %r11,%r12 shrdq $4,%r13,%r13 xorq %rbx,%r14 andq %r9,%r12 xorq %r9,%r13 addq 56(%rsp),%rax movq %rbx,%rdi xorq %r11,%r12 shrdq $6,%r14,%r14 xorq %rcx,%rdi addq %r12,%rax shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rbx,%r14 addq %r13,%rax xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 shrdq $23,%r13,%r13 movq %r14,%rax movq %r9,%r12 shrdq $5,%r14,%r14 xorq %r8,%r13 xorq %r10,%r12 shrdq $4,%r13,%r13 xorq %rax,%r14 andq %r8,%r12 xorq %r8,%r13 addq 64(%rsp),%r11 movq %rax,%r15 xorq %r10,%r12 shrdq $6,%r14,%r14 xorq %rbx,%r15 addq %r12,%r11 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rax,%r14 addq %r13,%r11 xorq %rbx,%rdi shrdq $28,%r14,%r14 addq %r11,%rdx addq %rdi,%r11 movq %rdx,%r13 addq %r11,%r14 shrdq $23,%r13,%r13 movq %r14,%r11 movq %r8,%r12 shrdq $5,%r14,%r14 xorq %rdx,%r13 xorq %r9,%r12 shrdq $4,%r13,%r13 xorq %r11,%r14 andq %rdx,%r12 xorq %rdx,%r13 addq 72(%rsp),%r10 movq %r11,%rdi xorq %r9,%r12 shrdq $6,%r14,%r14 xorq %rax,%rdi addq %r12,%r10 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r11,%r14 addq %r13,%r10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 shrdq $23,%r13,%r13 movq %r14,%r10 movq %rdx,%r12 shrdq $5,%r14,%r14 xorq %rcx,%r13 xorq %r8,%r12 shrdq $4,%r13,%r13 xorq %r10,%r14 andq %rcx,%r12 xorq %rcx,%r13 addq 80(%rsp),%r9 movq %r10,%r15 xorq %r8,%r12 shrdq $6,%r14,%r14 xorq %r11,%r15 addq %r12,%r9 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r10,%r14 addq %r13,%r9 xorq %r11,%rdi shrdq $28,%r14,%r14 addq %r9,%rbx addq %rdi,%r9 movq %rbx,%r13 addq %r9,%r14 shrdq $23,%r13,%r13 movq %r14,%r9 movq %rcx,%r12 shrdq $5,%r14,%r14 xorq %rbx,%r13 xorq %rdx,%r12 shrdq $4,%r13,%r13 xorq %r9,%r14 andq %rbx,%r12 xorq %rbx,%r13 addq 88(%rsp),%r8 movq %r9,%rdi xorq %rdx,%r12 shrdq $6,%r14,%r14 xorq %r10,%rdi addq %r12,%r8 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r9,%r14 addq %r13,%r8 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 
shrdq $23,%r13,%r13 movq %r14,%r8 movq %rbx,%r12 shrdq $5,%r14,%r14 xorq %rax,%r13 xorq %rcx,%r12 shrdq $4,%r13,%r13 xorq %r8,%r14 andq %rax,%r12 xorq %rax,%r13 addq 96(%rsp),%rdx movq %r8,%r15 xorq %rcx,%r12 shrdq $6,%r14,%r14 xorq %r9,%r15 addq %r12,%rdx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r8,%r14 addq %r13,%rdx xorq %r9,%rdi shrdq $28,%r14,%r14 addq %rdx,%r11 addq %rdi,%rdx movq %r11,%r13 addq %rdx,%r14 shrdq $23,%r13,%r13 movq %r14,%rdx movq %rax,%r12 shrdq $5,%r14,%r14 xorq %r11,%r13 xorq %rbx,%r12 shrdq $4,%r13,%r13 xorq %rdx,%r14 andq %r11,%r12 xorq %r11,%r13 addq 104(%rsp),%rcx movq %rdx,%rdi xorq %rbx,%r12 shrdq $6,%r14,%r14 xorq %r8,%rdi addq %r12,%rcx shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rdx,%r14 addq %r13,%rcx xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 shrdq $23,%r13,%r13 movq %r14,%rcx movq %r11,%r12 shrdq $5,%r14,%r14 xorq %r10,%r13 xorq %rax,%r12 shrdq $4,%r13,%r13 xorq %rcx,%r14 andq %r10,%r12 xorq %r10,%r13 addq 112(%rsp),%rbx movq %rcx,%r15 xorq %rax,%r12 shrdq $6,%r14,%r14 xorq %rdx,%r15 addq %r12,%rbx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rcx,%r14 addq %r13,%rbx xorq %rdx,%rdi shrdq $28,%r14,%r14 addq %rbx,%r9 addq %rdi,%rbx movq %r9,%r13 addq %rbx,%r14 shrdq $23,%r13,%r13 movq %r14,%rbx movq %r10,%r12 shrdq $5,%r14,%r14 xorq %r9,%r13 xorq %r11,%r12 shrdq $4,%r13,%r13 xorq %rbx,%r14 andq %r9,%r12 xorq %r9,%r13 addq 120(%rsp),%rax movq %rbx,%rdi xorq %r11,%r12 shrdq $6,%r14,%r14 xorq %rcx,%rdi addq %r12,%rax shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rbx,%r14 addq %r13,%rax xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 movq 128+0(%rsp),%rdi movq %r14,%rax addq 0(%rdi),%rax leaq 128(%rsi),%rsi addq 8(%rdi),%rbx addq 16(%rdi),%rcx addq 24(%rdi),%rdx addq 32(%rdi),%r8 addq 40(%rdi),%r9 addq 48(%rdi),%r10 addq 56(%rdi),%r11 cmpq 128+16(%rsp),%rsi movq %rax,0(%rdi) movq %rbx,8(%rdi) movq %rcx,16(%rdi) movq %rdx,24(%rdi) movq %r8,32(%rdi) movq %r9,40(%rdi) movq %r10,48(%rdi) movq %r11,56(%rdi) jb L$loop_avx movq 152(%rsp),%rsi vzeroupper movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_avx: .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
17,713
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/rsaz-2k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl _rsaz_amm52x20_x1_ifma256 .private_extern _rsaz_amm52x20_x1_ifma256 .p2align 5 _rsaz_amm52x20_x1_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$rsaz_amm52x20_x1_ifma256_body: vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $5,%ebx .p2align 5 L$loop5: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 
0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 24(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 leaq 32(%r11),%r11 decl %ebx jne L$loop5 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm16,%ymm1 vpsrlq $52,%ymm17,%ymm2 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm16,%ymm16 vpaddq %ymm2,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm16,%k2 vpcmpuq $6,L$mask52x4(%rip),%ymm17,%k3 vpcmpuq $6,L$mask52x4(%rip),%ymm18,%k4 vpcmpuq $6,L$mask52x4(%rip),%ymm19,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm16,%k2 vpcmpuq $0,L$mask52x4(%rip),%ymm17,%k3 vpcmpuq $0,L$mask52x4(%rip),%ymm18,%k4 vpcmpuq $0,L$mask52x4(%rip),%ymm19,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb %cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm16,%ymm16{%k2} vpsubq 
L$mask52x4(%rip),%ymm17,%ymm17{%k3} vpsubq L$mask52x4(%rip),%ymm18,%ymm18{%k4} vpsubq L$mask52x4(%rip),%ymm19,%ymm19{%k5} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm16,32(%rdi) vmovdqu64 %ymm17,64(%rdi) vmovdqu64 %ymm18,96(%rdi) vmovdqu64 %ymm19,128(%rdi) vzeroupper movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbp movq 40(%rsp),%rbx leaq 48(%rsp),%rsp L$rsaz_amm52x20_x1_ifma256_epilogue: .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$mask52x4: .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl _rsaz_amm52x20_x2_ifma256 .private_extern _rsaz_amm52x20_x2_ifma256 .p2align 5 _rsaz_amm52x20_x2_ifma256: .byte 243,15,30,250 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$rsaz_amm52x20_x2_ifma256_body: vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 vmovdqa64 %ymm0,%ymm23 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $20,%ebx .p2align 5 L$loop20: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 160(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 160(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 160(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq %r10,%r15 vpmadd52luq 160(%rsi),%ymm1,%ymm4 vpmadd52luq 192(%rsi),%ymm1,%ymm20 vpmadd52luq 224(%rsi),%ymm1,%ymm21 vpmadd52luq 256(%rsi),%ymm1,%ymm22 vpmadd52luq 288(%rsi),%ymm1,%ymm23 vpmadd52luq 160(%rcx),%ymm2,%ymm4 vpmadd52luq 192(%rcx),%ymm2,%ymm20 vpmadd52luq 224(%rcx),%ymm2,%ymm21 vpmadd52luq 256(%rcx),%ymm2,%ymm22 vpmadd52luq 288(%rcx),%ymm2,%ymm23 valignq $1,%ymm4,%ymm20,%ymm4 valignq $1,%ymm20,%ymm21,%ymm20 valignq $1,%ymm21,%ymm22,%ymm21 valignq $1,%ymm22,%ymm23,%ymm22 valignq $1,%ymm23,%ymm0,%ymm23 vmovq %xmm4,%r13 addq %r13,%r15 vpmadd52huq 160(%rsi),%ymm1,%ymm4 vpmadd52huq 192(%rsi),%ymm1,%ymm20 vpmadd52huq 224(%rsi),%ymm1,%ymm21 vpmadd52huq 256(%rsi),%ymm1,%ymm22 vpmadd52huq 
288(%rsi),%ymm1,%ymm23 vpmadd52huq 160(%rcx),%ymm2,%ymm4 vpmadd52huq 192(%rcx),%ymm2,%ymm20 vpmadd52huq 224(%rcx),%ymm2,%ymm21 vpmadd52huq 256(%rcx),%ymm2,%ymm22 vpmadd52huq 288(%rcx),%ymm2,%ymm23 leaq 8(%r11),%r11 decl %ebx jne L$loop20 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm16,%ymm1 vpsrlq $52,%ymm17,%ymm2 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm16,%ymm16 vpaddq %ymm2,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpcmpuq $6,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm16,%k2 vpcmpuq $6,L$mask52x4(%rip),%ymm17,%k3 vpcmpuq $6,L$mask52x4(%rip),%ymm18,%k4 vpcmpuq $6,L$mask52x4(%rip),%ymm19,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,L$mask52x4(%rip),%ymm3,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm16,%k2 vpcmpuq $0,L$mask52x4(%rip),%ymm17,%k3 vpcmpuq $0,L$mask52x4(%rip),%ymm18,%k4 vpcmpuq $0,L$mask52x4(%rip),%ymm19,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb %cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq L$mask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq L$mask52x4(%rip),%ymm16,%ymm16{%k2} vpsubq L$mask52x4(%rip),%ymm17,%ymm17{%k3} vpsubq L$mask52x4(%rip),%ymm18,%ymm18{%k4} vpsubq L$mask52x4(%rip),%ymm19,%ymm19{%k5} vpandq L$mask52x4(%rip),%ymm3,%ymm3 vpandq L$mask52x4(%rip),%ymm16,%ymm16 vpandq L$mask52x4(%rip),%ymm17,%ymm17 vpandq L$mask52x4(%rip),%ymm18,%ymm18 vpandq L$mask52x4(%rip),%ymm19,%ymm19 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm4,%ymm4 vpsrlq $52,%ymm4,%ymm0 vpsrlq $52,%ymm20,%ymm1 vpsrlq $52,%ymm21,%ymm2 vpsrlq $52,%ymm22,%ymm25 vpsrlq $52,%ymm23,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,L$zeros(%rip),%ymm0,%ymm0 vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm20,%ymm20 vpandq L$mask52x4(%rip),%ymm21,%ymm21 vpandq L$mask52x4(%rip),%ymm22,%ymm22 vpandq L$mask52x4(%rip),%ymm23,%ymm23 vpaddq %ymm0,%ymm4,%ymm4 vpaddq %ymm1,%ymm20,%ymm20 vpaddq %ymm2,%ymm21,%ymm21 vpaddq %ymm25,%ymm22,%ymm22 vpaddq %ymm26,%ymm23,%ymm23 vpcmpuq $6,L$mask52x4(%rip),%ymm4,%k1 vpcmpuq $6,L$mask52x4(%rip),%ymm20,%k2 vpcmpuq $6,L$mask52x4(%rip),%ymm21,%k3 vpcmpuq $6,L$mask52x4(%rip),%ymm22,%k4 vpcmpuq $6,L$mask52x4(%rip),%ymm23,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,L$mask52x4(%rip),%ymm4,%k1 vpcmpuq $0,L$mask52x4(%rip),%ymm20,%k2 vpcmpuq $0,L$mask52x4(%rip),%ymm21,%k3 vpcmpuq $0,L$mask52x4(%rip),%ymm22,%k4 vpcmpuq $0,L$mask52x4(%rip),%ymm23,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb 
%cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq L$mask52x4(%rip),%ymm4,%ymm4{%k1} vpsubq L$mask52x4(%rip),%ymm20,%ymm20{%k2} vpsubq L$mask52x4(%rip),%ymm21,%ymm21{%k3} vpsubq L$mask52x4(%rip),%ymm22,%ymm22{%k4} vpsubq L$mask52x4(%rip),%ymm23,%ymm23{%k5} vpandq L$mask52x4(%rip),%ymm4,%ymm4 vpandq L$mask52x4(%rip),%ymm20,%ymm20 vpandq L$mask52x4(%rip),%ymm21,%ymm21 vpandq L$mask52x4(%rip),%ymm22,%ymm22 vpandq L$mask52x4(%rip),%ymm23,%ymm23 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm16,32(%rdi) vmovdqu64 %ymm17,64(%rdi) vmovdqu64 %ymm18,96(%rdi) vmovdqu64 %ymm19,128(%rdi) vmovdqu64 %ymm4,160(%rdi) vmovdqu64 %ymm20,192(%rdi) vmovdqu64 %ymm21,224(%rdi) vmovdqu64 %ymm22,256(%rdi) vmovdqu64 %ymm23,288(%rdi) vzeroupper movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbp movq 40(%rsp),%rbx leaq 48(%rsp),%rsp L$rsaz_amm52x20_x2_ifma256_epilogue: .byte 0xf3,0xc3 .text .p2align 5 .globl _extract_multiplier_2x20_win5 .private_extern _extract_multiplier_2x20_win5 _extract_multiplier_2x20_win5: .byte 243,15,30,250 vmovdqa64 L$ones(%rip),%ymm24 vpbroadcastq %rdx,%ymm22 vpbroadcastq %rcx,%ymm23 leaq 10240(%rsi),%rax vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 .p2align 5 L$loop: vpcmpq $0,%ymm21,%ymm22,%k1 vpcmpq $0,%ymm21,%ymm23,%k2 vmovdqu64 0(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} vmovdqu64 64(%rsi),%ymm20 vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k2} vmovdqu64 192(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k2} vmovdqu64 224(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k2} vmovdqu64 256(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k2} vmovdqu64 288(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k2} vpaddq %ymm24,%ymm21,%ymm21 addq $320,%rsi cmpq %rsi,%rax jne L$loop vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 %ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) .byte 0xf3,0xc3 .section __DATA,__const .p2align 5 L$ones: .quad 1,1,1,1 L$zeros: .quad 0,0,0,0 .text #endif #endif
marvin-hansen/iggy-streaming-system
17,259
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/vpaes-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .p2align 4 _vpaes_encrypt_core: movq %rdx,%r9 movq $16,%r11 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa L$k_ipt(%rip),%xmm2 pandn %xmm0,%xmm1 movdqu (%r9),%xmm5 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa L$k_ipt+16(%rip),%xmm0 .byte 102,15,56,0,193 pxor %xmm5,%xmm2 addq $16,%r9 pxor %xmm2,%xmm0 leaq L$k_mc_backward(%rip),%r10 jmp L$enc_entry .p2align 4 L$enc_loop: movdqa %xmm13,%xmm4 movdqa %xmm12,%xmm0 .byte 102,15,56,0,226 .byte 102,15,56,0,195 pxor %xmm5,%xmm4 movdqa %xmm15,%xmm5 pxor %xmm4,%xmm0 movdqa -64(%r11,%r10,1),%xmm1 .byte 102,15,56,0,234 movdqa (%r11,%r10,1),%xmm4 movdqa %xmm14,%xmm2 .byte 102,15,56,0,211 movdqa %xmm0,%xmm3 pxor %xmm5,%xmm2 .byte 102,15,56,0,193 addq $16,%r9 pxor %xmm2,%xmm0 .byte 102,15,56,0,220 addq $16,%r11 pxor %xmm0,%xmm3 .byte 102,15,56,0,193 andq $0x30,%r11 subq $1,%rax pxor %xmm3,%xmm0 L$enc_entry: movdqa %xmm9,%xmm1 movdqa %xmm11,%xmm5 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,232 movdqa %xmm10,%xmm3 pxor %xmm1,%xmm0 .byte 102,15,56,0,217 movdqa %xmm10,%xmm4 pxor %xmm5,%xmm3 .byte 102,15,56,0,224 movdqa %xmm10,%xmm2 pxor %xmm5,%xmm4 .byte 102,15,56,0,211 movdqa %xmm10,%xmm3 pxor %xmm0,%xmm2 .byte 102,15,56,0,220 movdqu (%r9),%xmm5 pxor %xmm1,%xmm3 jnz L$enc_loop movdqa -96(%r10),%xmm4 movdqa -80(%r10),%xmm0 .byte 102,15,56,0,226 pxor %xmm5,%xmm4 .byte 102,15,56,0,195 movdqa 64(%r11,%r10,1),%xmm1 pxor %xmm4,%xmm0 .byte 102,15,56,0,193 .byte 0xf3,0xc3 .p2align 4 _vpaes_encrypt_core_2x: movq %rdx,%r9 movq $16,%r11 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa %xmm9,%xmm7 movdqa L$k_ipt(%rip),%xmm2 movdqa %xmm2,%xmm8 pandn %xmm0,%xmm1 pandn %xmm6,%xmm7 movdqu (%r9),%xmm5 psrld $4,%xmm1 psrld $4,%xmm7 pand %xmm9,%xmm0 pand %xmm9,%xmm6 .byte 102,15,56,0,208 .byte 102,68,15,56,0,198 movdqa L$k_ipt+16(%rip),%xmm0 movdqa %xmm0,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,247 pxor %xmm5,%xmm2 pxor %xmm5,%xmm8 addq $16,%r9 pxor %xmm2,%xmm0 pxor %xmm8,%xmm6 leaq L$k_mc_backward(%rip),%r10 jmp L$enc2x_entry .p2align 4 L$enc2x_loop: movdqa L$k_sb1(%rip),%xmm4 movdqa L$k_sb1+16(%rip),%xmm0 movdqa %xmm4,%xmm12 movdqa %xmm0,%xmm6 .byte 102,15,56,0,226 .byte 102,69,15,56,0,224 .byte 102,15,56,0,195 .byte 102,65,15,56,0,243 pxor %xmm5,%xmm4 pxor %xmm5,%xmm12 movdqa L$k_sb2(%rip),%xmm5 movdqa %xmm5,%xmm13 pxor %xmm4,%xmm0 pxor %xmm12,%xmm6 movdqa -64(%r11,%r10,1),%xmm1 .byte 102,15,56,0,234 .byte 102,69,15,56,0,232 movdqa (%r11,%r10,1),%xmm4 movdqa L$k_sb2+16(%rip),%xmm2 movdqa %xmm2,%xmm8 .byte 102,15,56,0,211 .byte 102,69,15,56,0,195 movdqa %xmm0,%xmm3 movdqa %xmm6,%xmm11 pxor %xmm5,%xmm2 pxor %xmm13,%xmm8 .byte 102,15,56,0,193 .byte 102,15,56,0,241 addq $16,%r9 pxor %xmm2,%xmm0 pxor %xmm8,%xmm6 .byte 102,15,56,0,220 .byte 102,68,15,56,0,220 addq $16,%r11 pxor %xmm0,%xmm3 pxor %xmm6,%xmm11 .byte 102,15,56,0,193 .byte 102,15,56,0,241 andq $0x30,%r11 subq $1,%rax pxor %xmm3,%xmm0 pxor %xmm11,%xmm6 L$enc2x_entry: movdqa %xmm9,%xmm1 movdqa %xmm9,%xmm7 movdqa L$k_inv+16(%rip),%xmm5 movdqa %xmm5,%xmm13 pandn %xmm0,%xmm1 pandn %xmm6,%xmm7 psrld $4,%xmm1 psrld $4,%xmm7 pand %xmm9,%xmm0 pand %xmm9,%xmm6 .byte 102,15,56,0,232 .byte 102,68,15,56,0,238 movdqa %xmm10,%xmm3 movdqa %xmm10,%xmm11 pxor %xmm1,%xmm0 pxor %xmm7,%xmm6 .byte 102,15,56,0,217 .byte 102,68,15,56,0,223 movdqa %xmm10,%xmm4 movdqa 
%xmm10,%xmm12 pxor %xmm5,%xmm3 pxor %xmm13,%xmm11 .byte 102,15,56,0,224 .byte 102,68,15,56,0,230 movdqa %xmm10,%xmm2 movdqa %xmm10,%xmm8 pxor %xmm5,%xmm4 pxor %xmm13,%xmm12 .byte 102,15,56,0,211 .byte 102,69,15,56,0,195 movdqa %xmm10,%xmm3 movdqa %xmm10,%xmm11 pxor %xmm0,%xmm2 pxor %xmm6,%xmm8 .byte 102,15,56,0,220 .byte 102,69,15,56,0,220 movdqu (%r9),%xmm5 pxor %xmm1,%xmm3 pxor %xmm7,%xmm11 jnz L$enc2x_loop movdqa -96(%r10),%xmm4 movdqa -80(%r10),%xmm0 movdqa %xmm4,%xmm12 movdqa %xmm0,%xmm6 .byte 102,15,56,0,226 .byte 102,69,15,56,0,224 pxor %xmm5,%xmm4 pxor %xmm5,%xmm12 .byte 102,15,56,0,195 .byte 102,65,15,56,0,243 movdqa 64(%r11,%r10,1),%xmm1 pxor %xmm4,%xmm0 pxor %xmm12,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,241 .byte 0xf3,0xc3 .p2align 4 _vpaes_decrypt_core: movq %rdx,%r9 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa L$k_dipt(%rip),%xmm2 pandn %xmm0,%xmm1 movq %rax,%r11 psrld $4,%xmm1 movdqu (%r9),%xmm5 shlq $4,%r11 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa L$k_dipt+16(%rip),%xmm0 xorq $0x30,%r11 leaq L$k_dsbd(%rip),%r10 .byte 102,15,56,0,193 andq $0x30,%r11 pxor %xmm5,%xmm2 movdqa L$k_mc_forward+48(%rip),%xmm5 pxor %xmm2,%xmm0 addq $16,%r9 addq %r10,%r11 jmp L$dec_entry .p2align 4 L$dec_loop: movdqa -32(%r10),%xmm4 movdqa -16(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 0(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 16(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 32(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 48(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 64(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 80(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 addq $16,%r9 .byte 102,15,58,15,237,12 pxor %xmm1,%xmm0 subq $1,%rax L$dec_entry: movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 movdqa %xmm11,%xmm2 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa %xmm10,%xmm3 pxor %xmm1,%xmm0 .byte 102,15,56,0,217 movdqa %xmm10,%xmm4 pxor %xmm2,%xmm3 .byte 102,15,56,0,224 pxor %xmm2,%xmm4 movdqa %xmm10,%xmm2 .byte 102,15,56,0,211 movdqa %xmm10,%xmm3 pxor %xmm0,%xmm2 .byte 102,15,56,0,220 movdqu (%r9),%xmm0 pxor %xmm1,%xmm3 jnz L$dec_loop movdqa 96(%r10),%xmm4 .byte 102,15,56,0,226 pxor %xmm0,%xmm4 movdqa 112(%r10),%xmm0 movdqa -352(%r11),%xmm2 .byte 102,15,56,0,195 pxor %xmm4,%xmm0 .byte 102,15,56,0,194 .byte 0xf3,0xc3 .p2align 4 _vpaes_schedule_core: call _vpaes_preheat movdqa L$k_rcon(%rip),%xmm8 movdqu (%rdi),%xmm0 movdqa %xmm0,%xmm3 leaq L$k_ipt(%rip),%r11 call _vpaes_schedule_transform movdqa %xmm0,%xmm7 leaq L$k_sr(%rip),%r10 testq %rcx,%rcx jnz L$schedule_am_decrypting movdqu %xmm0,(%rdx) jmp L$schedule_go L$schedule_am_decrypting: movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,217 movdqu %xmm3,(%rdx) xorq $0x30,%r8 L$schedule_go: cmpl $192,%esi ja L$schedule_256 je L$schedule_192 L$schedule_128: movl $10,%esi L$oop_schedule_128: call _vpaes_schedule_round decq %rsi jz L$schedule_mangle_last call _vpaes_schedule_mangle jmp L$oop_schedule_128 .p2align 4 L$schedule_192: movdqu 8(%rdi),%xmm0 call _vpaes_schedule_transform movdqa %xmm0,%xmm6 pxor %xmm4,%xmm4 movhlps %xmm4,%xmm6 movl $4,%esi L$oop_schedule_192: call _vpaes_schedule_round .byte 102,15,58,15,198,8 call _vpaes_schedule_mangle call _vpaes_schedule_192_smear call _vpaes_schedule_mangle call _vpaes_schedule_round decq %rsi jz L$schedule_mangle_last call _vpaes_schedule_mangle call _vpaes_schedule_192_smear jmp L$oop_schedule_192 .p2align 4 L$schedule_256: 
movdqu 16(%rdi),%xmm0 call _vpaes_schedule_transform movl $7,%esi L$oop_schedule_256: call _vpaes_schedule_mangle movdqa %xmm0,%xmm6 call _vpaes_schedule_round decq %rsi jz L$schedule_mangle_last call _vpaes_schedule_mangle pshufd $0xFF,%xmm0,%xmm0 movdqa %xmm7,%xmm5 movdqa %xmm6,%xmm7 call _vpaes_schedule_low_round movdqa %xmm5,%xmm7 jmp L$oop_schedule_256 .p2align 4 L$schedule_mangle_last: leaq L$k_deskew(%rip),%r11 testq %rcx,%rcx jnz L$schedule_mangle_last_dec movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,193 leaq L$k_opt(%rip),%r11 addq $32,%rdx L$schedule_mangle_last_dec: addq $-16,%rdx pxor L$k_s63(%rip),%xmm0 call _vpaes_schedule_transform movdqu %xmm0,(%rdx) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 .byte 0xf3,0xc3 .p2align 4 _vpaes_schedule_192_smear: pshufd $0x80,%xmm6,%xmm1 pshufd $0xFE,%xmm7,%xmm0 pxor %xmm1,%xmm6 pxor %xmm1,%xmm1 pxor %xmm0,%xmm6 movdqa %xmm6,%xmm0 movhlps %xmm1,%xmm6 .byte 0xf3,0xc3 .p2align 4 _vpaes_schedule_round: pxor %xmm1,%xmm1 .byte 102,65,15,58,15,200,15 .byte 102,69,15,58,15,192,15 pxor %xmm1,%xmm7 pshufd $0xFF,%xmm0,%xmm0 .byte 102,15,58,15,192,1 _vpaes_schedule_low_round: movdqa %xmm7,%xmm1 pslldq $4,%xmm7 pxor %xmm1,%xmm7 movdqa %xmm7,%xmm1 pslldq $8,%xmm7 pxor %xmm1,%xmm7 pxor L$k_s63(%rip),%xmm7 movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 movdqa %xmm11,%xmm2 .byte 102,15,56,0,208 pxor %xmm1,%xmm0 movdqa %xmm10,%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 movdqa %xmm10,%xmm4 .byte 102,15,56,0,224 pxor %xmm2,%xmm4 movdqa %xmm10,%xmm2 .byte 102,15,56,0,211 pxor %xmm0,%xmm2 movdqa %xmm10,%xmm3 .byte 102,15,56,0,220 pxor %xmm1,%xmm3 movdqa %xmm13,%xmm4 .byte 102,15,56,0,226 movdqa %xmm12,%xmm0 .byte 102,15,56,0,195 pxor %xmm4,%xmm0 pxor %xmm7,%xmm0 movdqa %xmm0,%xmm7 .byte 0xf3,0xc3 .p2align 4 _vpaes_schedule_transform: movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 movdqa (%r11),%xmm2 .byte 102,15,56,0,208 movdqa 16(%r11),%xmm0 .byte 102,15,56,0,193 pxor %xmm2,%xmm0 .byte 0xf3,0xc3 .p2align 4 _vpaes_schedule_mangle: movdqa %xmm0,%xmm4 movdqa L$k_mc_forward(%rip),%xmm5 testq %rcx,%rcx jnz L$schedule_mangle_dec addq $16,%rdx pxor L$k_s63(%rip),%xmm4 .byte 102,15,56,0,229 movdqa %xmm4,%xmm3 .byte 102,15,56,0,229 pxor %xmm4,%xmm3 .byte 102,15,56,0,229 pxor %xmm4,%xmm3 jmp L$schedule_mangle_both .p2align 4 L$schedule_mangle_dec: leaq L$k_dksd(%rip),%r11 movdqa %xmm9,%xmm1 pandn %xmm4,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm4 movdqa 0(%r11),%xmm2 .byte 102,15,56,0,212 movdqa 16(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 32(%r11),%xmm2 .byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 48(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 64(%r11),%xmm2 .byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 80(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 96(%r11),%xmm2 .byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 112(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 addq $-16,%rdx L$schedule_mangle_both: movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,217 addq $-16,%r8 andq $0x30,%r8 movdqu %xmm3,(%rdx) .byte 0xf3,0xc3 .globl _vpaes_set_encrypt_key .private_extern _vpaes_set_encrypt_key .p2align 4 _vpaes_set_encrypt_key: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+5(%rip) #endif movl %esi,%eax shrl $5,%eax addl $5,%eax movl %eax,240(%rdx) movl $0,%ecx movl $0x30,%r8d call _vpaes_schedule_core xorl %eax,%eax 
.byte 0xf3,0xc3 .globl _vpaes_set_decrypt_key .private_extern _vpaes_set_decrypt_key .p2align 4 _vpaes_set_decrypt_key: _CET_ENDBR movl %esi,%eax shrl $5,%eax addl $5,%eax movl %eax,240(%rdx) shll $4,%eax leaq 16(%rdx,%rax,1),%rdx movl $1,%ecx movl %esi,%r8d shrl $1,%r8d andl $32,%r8d xorl $32,%r8d call _vpaes_schedule_core xorl %eax,%eax .byte 0xf3,0xc3 .globl _vpaes_encrypt .private_extern _vpaes_encrypt .p2align 4 _vpaes_encrypt: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+4(%rip) #endif movdqu (%rdi),%xmm0 call _vpaes_preheat call _vpaes_encrypt_core movdqu %xmm0,(%rsi) .byte 0xf3,0xc3 .globl _vpaes_decrypt .private_extern _vpaes_decrypt .p2align 4 _vpaes_decrypt: _CET_ENDBR movdqu (%rdi),%xmm0 call _vpaes_preheat call _vpaes_decrypt_core movdqu %xmm0,(%rsi) .byte 0xf3,0xc3 .globl _vpaes_cbc_encrypt .private_extern _vpaes_cbc_encrypt .p2align 4 _vpaes_cbc_encrypt: _CET_ENDBR xchgq %rcx,%rdx subq $16,%rcx jc L$cbc_abort movdqu (%r8),%xmm6 subq %rdi,%rsi call _vpaes_preheat cmpl $0,%r9d je L$cbc_dec_loop jmp L$cbc_enc_loop .p2align 4 L$cbc_enc_loop: movdqu (%rdi),%xmm0 pxor %xmm6,%xmm0 call _vpaes_encrypt_core movdqa %xmm0,%xmm6 movdqu %xmm0,(%rsi,%rdi,1) leaq 16(%rdi),%rdi subq $16,%rcx jnc L$cbc_enc_loop jmp L$cbc_done .p2align 4 L$cbc_dec_loop: movdqu (%rdi),%xmm0 movdqa %xmm0,%xmm7 call _vpaes_decrypt_core pxor %xmm6,%xmm0 movdqa %xmm7,%xmm6 movdqu %xmm0,(%rsi,%rdi,1) leaq 16(%rdi),%rdi subq $16,%rcx jnc L$cbc_dec_loop L$cbc_done: movdqu %xmm6,(%r8) L$cbc_abort: .byte 0xf3,0xc3 .globl _vpaes_ctr32_encrypt_blocks .private_extern _vpaes_ctr32_encrypt_blocks .p2align 4 _vpaes_ctr32_encrypt_blocks: _CET_ENDBR xchgq %rcx,%rdx testq %rcx,%rcx jz L$ctr32_abort movdqu (%r8),%xmm0 movdqa L$ctr_add_one(%rip),%xmm8 subq %rdi,%rsi call _vpaes_preheat movdqa %xmm0,%xmm6 pshufb L$rev_ctr(%rip),%xmm6 testq $1,%rcx jz L$ctr32_prep_loop movdqu (%rdi),%xmm7 call _vpaes_encrypt_core pxor %xmm7,%xmm0 paddd %xmm8,%xmm6 movdqu %xmm0,(%rsi,%rdi,1) subq $1,%rcx leaq 16(%rdi),%rdi jz L$ctr32_done L$ctr32_prep_loop: movdqa %xmm6,%xmm14 movdqa %xmm6,%xmm15 paddd %xmm8,%xmm15 L$ctr32_loop: movdqa L$rev_ctr(%rip),%xmm1 movdqa %xmm14,%xmm0 movdqa %xmm15,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,241 call _vpaes_encrypt_core_2x movdqu (%rdi),%xmm1 movdqu 16(%rdi),%xmm2 movdqa L$ctr_add_two(%rip),%xmm3 pxor %xmm1,%xmm0 pxor %xmm2,%xmm6 paddd %xmm3,%xmm14 paddd %xmm3,%xmm15 movdqu %xmm0,(%rsi,%rdi,1) movdqu %xmm6,16(%rsi,%rdi,1) subq $2,%rcx leaq 32(%rdi),%rdi jnz L$ctr32_loop L$ctr32_done: L$ctr32_abort: .byte 0xf3,0xc3 .p2align 4 _vpaes_preheat: leaq L$k_s0F(%rip),%r10 movdqa -32(%r10),%xmm10 movdqa -16(%r10),%xmm11 movdqa 0(%r10),%xmm9 movdqa 48(%r10),%xmm13 movdqa 64(%r10),%xmm12 movdqa 80(%r10),%xmm15 movdqa 96(%r10),%xmm14 .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 _vpaes_consts: L$k_inv: .quad 0x0E05060F0D080180, 0x040703090A0B0C02 .quad 0x01040A060F0B0780, 0x030D0E0C02050809 L$k_s0F: .quad 0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F0F0F0F0F L$k_ipt: .quad 0xC2B2E8985A2A7000, 0xCABAE09052227808 .quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81 L$k_sb1: .quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544 .quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF L$k_sb2: .quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD .quad 0x69EB88400AE12900, 0xC2A163C8AB82234A L$k_sbo: .quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878 .quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA L$k_mc_forward: .quad 0x0407060500030201, 0x0C0F0E0D080B0A09 .quad 0x080B0A0904070605, 0x000302010C0F0E0D .quad 0x0C0F0E0D080B0A09, 0x0407060500030201 
.quad 0x000302010C0F0E0D, 0x080B0A0904070605 L$k_mc_backward: .quad 0x0605040702010003, 0x0E0D0C0F0A09080B .quad 0x020100030E0D0C0F, 0x0A09080B06050407 .quad 0x0E0D0C0F0A09080B, 0x0605040702010003 .quad 0x0A09080B06050407, 0x020100030E0D0C0F L$k_sr: .quad 0x0706050403020100, 0x0F0E0D0C0B0A0908 .quad 0x030E09040F0A0500, 0x0B06010C07020D08 .quad 0x0F060D040B020900, 0x070E050C030A0108 .quad 0x0B0E0104070A0D00, 0x0306090C0F020508 L$k_rcon: .quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81 L$k_s63: .quad 0x5B5B5B5B5B5B5B5B, 0x5B5B5B5B5B5B5B5B L$k_opt: .quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808 .quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0 L$k_deskew: .quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A .quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77 L$k_dksd: .quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9 .quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E L$k_dksb: .quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99 .quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8 L$k_dkse: .quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086 .quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487 L$k_dks9: .quad 0xB6116FC87ED9A700, 0x4AED933482255BFC .quad 0x4576516227143300, 0x8BB89FACE9DAFDCE L$k_dipt: .quad 0x0F505B040B545F00, 0x154A411E114E451A .quad 0x86E383E660056500, 0x12771772F491F194 L$k_dsb9: .quad 0x851C03539A86D600, 0xCAD51F504F994CC9 .quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565 L$k_dsbd: .quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439 .quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3 L$k_dsbb: .quad 0xD022649296B44200, 0x602646F6B0F2D404 .quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B L$k_dsbe: .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 L$k_dsbo: .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C L$rev_ctr: .quad 0x0706050403020100, 0x0c0d0e0f0b0a0908 L$ctr_add_one: .quad 0x0000000000000000, 0x0000000100000000 L$ctr_add_two: .quad 0x0000000000000000, 0x0000000200000000 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 .p2align 6 .text #endif
marvin-hansen/iggy-streaming-system
4,102
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .private_extern _beeu_mod_inverse_vartime .globl _beeu_mod_inverse_vartime .private_extern _beeu_mod_inverse_vartime .p2align 5 _beeu_mod_inverse_vartime: _CET_ENDBR pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 pushq %rbx pushq %rsi subq $80,%rsp movq %rdi,0(%rsp) movq $1,%r8 xorq %r9,%r9 xorq %r10,%r10 xorq %r11,%r11 xorq %rdi,%rdi xorq %r12,%r12 xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 xorq %rbp,%rbp vmovdqu 0(%rsi),%xmm0 vmovdqu 16(%rsi),%xmm1 vmovdqu %xmm0,48(%rsp) vmovdqu %xmm1,64(%rsp) vmovdqu 0(%rdx),%xmm0 vmovdqu 16(%rdx),%xmm1 vmovdqu %xmm0,16(%rsp) vmovdqu %xmm1,32(%rsp) L$beeu_loop: xorq %rbx,%rbx orq 48(%rsp),%rbx orq 56(%rsp),%rbx orq 64(%rsp),%rbx orq 72(%rsp),%rbx jz L$beeu_loop_end movq $1,%rcx L$beeu_shift_loop_XB: movq %rcx,%rbx andq 48(%rsp),%rbx jnz L$beeu_shift_loop_end_XB movq $1,%rbx andq %r8,%rbx jz L$shift1_0 addq 0(%rdx),%r8 adcq 8(%rdx),%r9 adcq 16(%rdx),%r10 adcq 24(%rdx),%r11 adcq $0,%rdi L$shift1_0: shrdq $1,%r9,%r8 shrdq $1,%r10,%r9 shrdq $1,%r11,%r10 shrdq $1,%rdi,%r11 shrq $1,%rdi shlq $1,%rcx cmpq $0x8000000,%rcx jne L$beeu_shift_loop_XB L$beeu_shift_loop_end_XB: bsfq %rcx,%rcx testq %rcx,%rcx jz L$beeu_no_shift_XB movq 8+48(%rsp),%rax movq 16+48(%rsp),%rbx movq 24+48(%rsp),%rsi shrdq %cl,%rax,0+48(%rsp) shrdq %cl,%rbx,8+48(%rsp) shrdq %cl,%rsi,16+48(%rsp) shrq %cl,%rsi movq %rsi,24+48(%rsp) L$beeu_no_shift_XB: movq $1,%rcx L$beeu_shift_loop_YA: movq %rcx,%rbx andq 16(%rsp),%rbx jnz L$beeu_shift_loop_end_YA movq $1,%rbx andq %r12,%rbx jz L$shift1_1 addq 0(%rdx),%r12 adcq 8(%rdx),%r13 adcq 16(%rdx),%r14 adcq 24(%rdx),%r15 adcq $0,%rbp L$shift1_1: shrdq $1,%r13,%r12 shrdq $1,%r14,%r13 shrdq $1,%r15,%r14 shrdq $1,%rbp,%r15 shrq $1,%rbp shlq $1,%rcx cmpq $0x8000000,%rcx jne L$beeu_shift_loop_YA L$beeu_shift_loop_end_YA: bsfq %rcx,%rcx testq %rcx,%rcx jz L$beeu_no_shift_YA movq 8+16(%rsp),%rax movq 16+16(%rsp),%rbx movq 24+16(%rsp),%rsi shrdq %cl,%rax,0+16(%rsp) shrdq %cl,%rbx,8+16(%rsp) shrdq %cl,%rsi,16+16(%rsp) shrq %cl,%rsi movq %rsi,24+16(%rsp) L$beeu_no_shift_YA: movq 48(%rsp),%rax movq 56(%rsp),%rbx movq 64(%rsp),%rsi movq 72(%rsp),%rcx subq 16(%rsp),%rax sbbq 24(%rsp),%rbx sbbq 32(%rsp),%rsi sbbq 40(%rsp),%rcx jnc L$beeu_B_bigger_than_A movq 16(%rsp),%rax movq 24(%rsp),%rbx movq 32(%rsp),%rsi movq 40(%rsp),%rcx subq 48(%rsp),%rax sbbq 56(%rsp),%rbx sbbq 64(%rsp),%rsi sbbq 72(%rsp),%rcx movq %rax,16(%rsp) movq %rbx,24(%rsp) movq %rsi,32(%rsp) movq %rcx,40(%rsp) addq %r8,%r12 adcq %r9,%r13 adcq %r10,%r14 adcq %r11,%r15 adcq %rdi,%rbp jmp L$beeu_loop L$beeu_B_bigger_than_A: movq %rax,48(%rsp) movq %rbx,56(%rsp) movq %rsi,64(%rsp) movq %rcx,72(%rsp) addq %r12,%r8 adcq %r13,%r9 adcq %r14,%r10 adcq %r15,%r11 adcq %rbp,%rdi jmp L$beeu_loop L$beeu_loop_end: movq 16(%rsp),%rbx subq $1,%rbx orq 24(%rsp),%rbx orq 32(%rsp),%rbx orq 40(%rsp),%rbx jnz L$beeu_err movq 0(%rdx),%r8 movq 8(%rdx),%r9 movq 16(%rdx),%r10 movq 24(%rdx),%r11 xorq %rdi,%rdi L$beeu_reduction_loop: movq %r12,16(%rsp) movq %r13,24(%rsp) movq %r14,32(%rsp) movq %r15,40(%rsp) movq %rbp,48(%rsp) subq %r8,%r12 sbbq %r9,%r13 sbbq %r10,%r14 sbbq %r11,%r15 sbbq $0,%rbp cmovcq 16(%rsp),%r12 cmovcq 24(%rsp),%r13 cmovcq 32(%rsp),%r14 cmovcq 40(%rsp),%r15 jnc L$beeu_reduction_loop subq %r12,%r8 sbbq %r13,%r9 sbbq %r14,%r10 sbbq %r15,%r11 L$beeu_save: movq 0(%rsp),%rdi 
movq %r8,0(%rdi) movq %r9,8(%rdi) movq %r10,16(%rdi) movq %r11,24(%rdi) movq $1,%rax jmp L$beeu_finish L$beeu_err: xorq %rax,%rax L$beeu_finish: addq $80,%rsp popq %rsi popq %rbx popq %r15 popq %r14 popq %r13 popq %r12 popq %rbp .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
18,376
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .p2align 5 _aesni_ctr32_ghash_6x: vmovdqu 32(%r11),%xmm2 subq $6,%rdx vpxor %xmm4,%xmm4,%xmm4 vmovdqu 0-128(%rcx),%xmm15 vpaddb %xmm2,%xmm1,%xmm10 vpaddb %xmm2,%xmm10,%xmm11 vpaddb %xmm2,%xmm11,%xmm12 vpaddb %xmm2,%xmm12,%xmm13 vpaddb %xmm2,%xmm13,%xmm14 vpxor %xmm15,%xmm1,%xmm9 vmovdqu %xmm4,16+8(%rsp) jmp L$oop6x .p2align 5 L$oop6x: addl $100663296,%ebx jc L$handle_ctr32 vmovdqu 0-32(%r9),%xmm3 vpaddb %xmm2,%xmm14,%xmm1 vpxor %xmm15,%xmm10,%xmm10 vpxor %xmm15,%xmm11,%xmm11 L$resume_ctr32: vmovdqu %xmm1,(%r8) vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5 vpxor %xmm15,%xmm12,%xmm12 vmovups 16-128(%rcx),%xmm2 vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6 xorq %r12,%r12 cmpq %r14,%r15 vaesenc %xmm2,%xmm9,%xmm9 vmovdqu 48+8(%rsp),%xmm0 vpxor %xmm15,%xmm13,%xmm13 vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1 vaesenc %xmm2,%xmm10,%xmm10 vpxor %xmm15,%xmm14,%xmm14 setnc %r12b vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vmovdqu 16-32(%r9),%xmm3 negq %r12 vaesenc %xmm2,%xmm12,%xmm12 vpxor %xmm5,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5 vpxor %xmm4,%xmm8,%xmm8 vaesenc %xmm2,%xmm13,%xmm13 vpxor %xmm5,%xmm1,%xmm4 andq $0x60,%r12 vmovups 32-128(%rcx),%xmm15 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1 vaesenc %xmm2,%xmm14,%xmm14 vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2 leaq (%r14,%r12,1),%r14 vaesenc %xmm15,%xmm9,%xmm9 vpxor 16+8(%rsp),%xmm8,%xmm8 vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3 vmovdqu 64+8(%rsp),%xmm0 vaesenc %xmm15,%xmm10,%xmm10 movbeq 88(%r14),%r13 vaesenc %xmm15,%xmm11,%xmm11 movbeq 80(%r14),%r12 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,32+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,40+8(%rsp) vmovdqu 48-32(%r9),%xmm5 vaesenc %xmm15,%xmm14,%xmm14 vmovups 48-128(%rcx),%xmm15 vpxor %xmm1,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm2,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2 vaesenc %xmm15,%xmm10,%xmm10 vpxor %xmm3,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3 vaesenc %xmm15,%xmm11,%xmm11 vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5 vmovdqu 80+8(%rsp),%xmm0 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vpxor %xmm1,%xmm4,%xmm4 vmovdqu 64-32(%r9),%xmm1 vaesenc %xmm15,%xmm14,%xmm14 vmovups 64-128(%rcx),%xmm15 vpxor %xmm2,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm3,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3 vaesenc %xmm15,%xmm10,%xmm10 movbeq 72(%r14),%r13 vpxor %xmm5,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5 vaesenc %xmm15,%xmm11,%xmm11 movbeq 64(%r14),%r12 vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1 vmovdqu 96+8(%rsp),%xmm0 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,48+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,56+8(%rsp) vpxor %xmm2,%xmm4,%xmm4 vmovdqu 96-32(%r9),%xmm2 vaesenc %xmm15,%xmm14,%xmm14 vmovups 80-128(%rcx),%xmm15 vpxor %xmm3,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm5,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5 vaesenc %xmm15,%xmm10,%xmm10 movbeq 56(%r14),%r13 vpxor %xmm1,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1 vpxor 112+8(%rsp),%xmm8,%xmm8 vaesenc %xmm15,%xmm11,%xmm11 movbeq 48(%r14),%r12 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,64+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,72+8(%rsp) vpxor %xmm3,%xmm4,%xmm4 vmovdqu 112-32(%r9),%xmm3 vaesenc %xmm15,%xmm14,%xmm14 vmovups 96-128(%rcx),%xmm15 vpxor 
%xmm5,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm1,%xmm6,%xmm6 vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1 vaesenc %xmm15,%xmm10,%xmm10 movbeq 40(%r14),%r13 vpxor %xmm2,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2 vaesenc %xmm15,%xmm11,%xmm11 movbeq 32(%r14),%r12 vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,80+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,88+8(%rsp) vpxor %xmm5,%xmm6,%xmm6 vaesenc %xmm15,%xmm14,%xmm14 vpxor %xmm1,%xmm6,%xmm6 vmovups 112-128(%rcx),%xmm15 vpslldq $8,%xmm6,%xmm5 vpxor %xmm2,%xmm4,%xmm4 vmovdqu 16(%r11),%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm8,%xmm7,%xmm7 vaesenc %xmm15,%xmm10,%xmm10 vpxor %xmm5,%xmm4,%xmm4 movbeq 24(%r14),%r13 vaesenc %xmm15,%xmm11,%xmm11 movbeq 16(%r14),%r12 vpalignr $8,%xmm4,%xmm4,%xmm0 vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 movq %r13,96+8(%rsp) vaesenc %xmm15,%xmm12,%xmm12 movq %r12,104+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 vmovups 128-128(%rcx),%xmm1 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vmovups 144-128(%rcx),%xmm15 vaesenc %xmm1,%xmm10,%xmm10 vpsrldq $8,%xmm6,%xmm6 vaesenc %xmm1,%xmm11,%xmm11 vpxor %xmm6,%xmm7,%xmm7 vaesenc %xmm1,%xmm12,%xmm12 vpxor %xmm0,%xmm4,%xmm4 movbeq 8(%r14),%r13 vaesenc %xmm1,%xmm13,%xmm13 movbeq 0(%r14),%r12 vaesenc %xmm1,%xmm14,%xmm14 vmovups 160-128(%rcx),%xmm1 cmpl $11,%r10d jb L$enc_tail vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovups 176-128(%rcx),%xmm15 vaesenc %xmm1,%xmm14,%xmm14 vmovups 192-128(%rcx),%xmm1 je L$enc_tail vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovups 208-128(%rcx),%xmm15 vaesenc %xmm1,%xmm14,%xmm14 vmovups 224-128(%rcx),%xmm1 jmp L$enc_tail .p2align 5 L$handle_ctr32: vmovdqu (%r11),%xmm0 vpshufb %xmm0,%xmm1,%xmm6 vmovdqu 48(%r11),%xmm5 vpaddd 64(%r11),%xmm6,%xmm10 vpaddd %xmm5,%xmm6,%xmm11 vmovdqu 0-32(%r9),%xmm3 vpaddd %xmm5,%xmm10,%xmm12 vpshufb %xmm0,%xmm10,%xmm10 vpaddd %xmm5,%xmm11,%xmm13 vpshufb %xmm0,%xmm11,%xmm11 vpxor %xmm15,%xmm10,%xmm10 vpaddd %xmm5,%xmm12,%xmm14 vpshufb %xmm0,%xmm12,%xmm12 vpxor %xmm15,%xmm11,%xmm11 vpaddd %xmm5,%xmm13,%xmm1 vpshufb %xmm0,%xmm13,%xmm13 vpshufb %xmm0,%xmm14,%xmm14 vpshufb %xmm0,%xmm1,%xmm1 jmp L$resume_ctr32 .p2align 5 L$enc_tail: vaesenc %xmm15,%xmm9,%xmm9 vmovdqu %xmm7,16+8(%rsp) vpalignr $8,%xmm4,%xmm4,%xmm8 vaesenc %xmm15,%xmm10,%xmm10 vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 vpxor 0(%rdi),%xmm1,%xmm2 vaesenc %xmm15,%xmm11,%xmm11 vpxor 16(%rdi),%xmm1,%xmm0 vaesenc %xmm15,%xmm12,%xmm12 vpxor 32(%rdi),%xmm1,%xmm5 vaesenc %xmm15,%xmm13,%xmm13 vpxor 48(%rdi),%xmm1,%xmm6 vaesenc %xmm15,%xmm14,%xmm14 vpxor 64(%rdi),%xmm1,%xmm7 vpxor 80(%rdi),%xmm1,%xmm3 vmovdqu (%r8),%xmm1 vaesenclast %xmm2,%xmm9,%xmm9 vmovdqu 32(%r11),%xmm2 vaesenclast %xmm0,%xmm10,%xmm10 vpaddb %xmm2,%xmm1,%xmm0 movq %r13,112+8(%rsp) leaq 96(%rdi),%rdi prefetcht0 512(%rdi) prefetcht0 576(%rdi) vaesenclast %xmm5,%xmm11,%xmm11 vpaddb %xmm2,%xmm0,%xmm5 movq %r12,120+8(%rsp) leaq 96(%rsi),%rsi vmovdqu 0-128(%rcx),%xmm15 vaesenclast %xmm6,%xmm12,%xmm12 vpaddb %xmm2,%xmm5,%xmm6 
vaesenclast %xmm7,%xmm13,%xmm13 vpaddb %xmm2,%xmm6,%xmm7 vaesenclast %xmm3,%xmm14,%xmm14 vpaddb %xmm2,%xmm7,%xmm3 addq $0x60,%rax subq $0x6,%rdx jc L$6x_done vmovups %xmm9,-96(%rsi) vpxor %xmm15,%xmm1,%xmm9 vmovups %xmm10,-80(%rsi) vmovdqa %xmm0,%xmm10 vmovups %xmm11,-64(%rsi) vmovdqa %xmm5,%xmm11 vmovups %xmm12,-48(%rsi) vmovdqa %xmm6,%xmm12 vmovups %xmm13,-32(%rsi) vmovdqa %xmm7,%xmm13 vmovups %xmm14,-16(%rsi) vmovdqa %xmm3,%xmm14 vmovdqu 32+8(%rsp),%xmm7 jmp L$oop6x L$6x_done: vpxor 16+8(%rsp),%xmm8,%xmm8 vpxor %xmm4,%xmm8,%xmm8 .byte 0xf3,0xc3 .globl _aesni_gcm_decrypt .private_extern _aesni_gcm_decrypt .p2align 5 _aesni_gcm_decrypt: _CET_ENDBR xorq %rax,%rax cmpq $0x60,%rdx jb L$gcm_dec_abort pushq %rbp movq %rsp,%rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 vzeroupper movq 16(%rbp),%r12 vmovdqu (%r8),%xmm1 addq $-128,%rsp movl 12(%r8),%ebx leaq L$bswap_mask(%rip),%r11 leaq -128(%rcx),%r14 movq $0xf80,%r15 vmovdqu (%r12),%xmm8 andq $-128,%rsp vmovdqu (%r11),%xmm0 leaq 128(%rcx),%rcx leaq 32(%r9),%r9 movl 240-128(%rcx),%r10d vpshufb %xmm0,%xmm8,%xmm8 andq %r15,%r14 andq %rsp,%r15 subq %r14,%r15 jc L$dec_no_key_aliasing cmpq $768,%r15 jnc L$dec_no_key_aliasing subq %r15,%rsp L$dec_no_key_aliasing: vmovdqu 80(%rdi),%xmm7 movq %rdi,%r14 vmovdqu 64(%rdi),%xmm4 leaq -192(%rdi,%rdx,1),%r15 vmovdqu 48(%rdi),%xmm5 shrq $4,%rdx xorq %rax,%rax vmovdqu 32(%rdi),%xmm6 vpshufb %xmm0,%xmm7,%xmm7 vmovdqu 16(%rdi),%xmm2 vpshufb %xmm0,%xmm4,%xmm4 vmovdqu (%rdi),%xmm3 vpshufb %xmm0,%xmm5,%xmm5 vmovdqu %xmm4,48(%rsp) vpshufb %xmm0,%xmm6,%xmm6 vmovdqu %xmm5,64(%rsp) vpshufb %xmm0,%xmm2,%xmm2 vmovdqu %xmm6,80(%rsp) vpshufb %xmm0,%xmm3,%xmm3 vmovdqu %xmm2,96(%rsp) vmovdqu %xmm3,112(%rsp) call _aesni_ctr32_ghash_6x movq 16(%rbp),%r12 vmovups %xmm9,-96(%rsi) vmovups %xmm10,-80(%rsi) vmovups %xmm11,-64(%rsi) vmovups %xmm12,-48(%rsi) vmovups %xmm13,-32(%rsi) vmovups %xmm14,-16(%rsi) vpshufb (%r11),%xmm8,%xmm8 vmovdqu %xmm8,(%r12) vzeroupper leaq -40(%rbp),%rsp popq %r15 popq %r14 popq %r13 popq %r12 popq %rbx popq %rbp L$gcm_dec_abort: .byte 0xf3,0xc3 .p2align 5 _aesni_ctr32_6x: vmovdqu 0-128(%rcx),%xmm4 vmovdqu 32(%r11),%xmm2 leaq -1(%r10),%r13 vmovups 16-128(%rcx),%xmm15 leaq 32-128(%rcx),%r12 vpxor %xmm4,%xmm1,%xmm9 addl $100663296,%ebx jc L$handle_ctr32_2 vpaddb %xmm2,%xmm1,%xmm10 vpaddb %xmm2,%xmm10,%xmm11 vpxor %xmm4,%xmm10,%xmm10 vpaddb %xmm2,%xmm11,%xmm12 vpxor %xmm4,%xmm11,%xmm11 vpaddb %xmm2,%xmm12,%xmm13 vpxor %xmm4,%xmm12,%xmm12 vpaddb %xmm2,%xmm13,%xmm14 vpxor %xmm4,%xmm13,%xmm13 vpaddb %xmm2,%xmm14,%xmm1 vpxor %xmm4,%xmm14,%xmm14 jmp L$oop_ctr32 .p2align 4 L$oop_ctr32: vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vmovups (%r12),%xmm15 leaq 16(%r12),%r12 decl %r13d jnz L$oop_ctr32 vmovdqu (%r12),%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor 0(%rdi),%xmm3,%xmm4 vaesenc %xmm15,%xmm10,%xmm10 vpxor 16(%rdi),%xmm3,%xmm5 vaesenc %xmm15,%xmm11,%xmm11 vpxor 32(%rdi),%xmm3,%xmm6 vaesenc %xmm15,%xmm12,%xmm12 vpxor 48(%rdi),%xmm3,%xmm8 vaesenc %xmm15,%xmm13,%xmm13 vpxor 64(%rdi),%xmm3,%xmm2 vaesenc %xmm15,%xmm14,%xmm14 vpxor 80(%rdi),%xmm3,%xmm3 leaq 96(%rdi),%rdi vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm5,%xmm10,%xmm10 vaesenclast %xmm6,%xmm11,%xmm11 vaesenclast %xmm8,%xmm12,%xmm12 vaesenclast %xmm2,%xmm13,%xmm13 vaesenclast %xmm3,%xmm14,%xmm14 vmovups %xmm9,0(%rsi) vmovups %xmm10,16(%rsi) vmovups %xmm11,32(%rsi) vmovups %xmm12,48(%rsi) vmovups %xmm13,64(%rsi) vmovups 
%xmm14,80(%rsi) leaq 96(%rsi),%rsi .byte 0xf3,0xc3 .p2align 5 L$handle_ctr32_2: vpshufb %xmm0,%xmm1,%xmm6 vmovdqu 48(%r11),%xmm5 vpaddd 64(%r11),%xmm6,%xmm10 vpaddd %xmm5,%xmm6,%xmm11 vpaddd %xmm5,%xmm10,%xmm12 vpshufb %xmm0,%xmm10,%xmm10 vpaddd %xmm5,%xmm11,%xmm13 vpshufb %xmm0,%xmm11,%xmm11 vpxor %xmm4,%xmm10,%xmm10 vpaddd %xmm5,%xmm12,%xmm14 vpshufb %xmm0,%xmm12,%xmm12 vpxor %xmm4,%xmm11,%xmm11 vpaddd %xmm5,%xmm13,%xmm1 vpshufb %xmm0,%xmm13,%xmm13 vpxor %xmm4,%xmm12,%xmm12 vpshufb %xmm0,%xmm14,%xmm14 vpxor %xmm4,%xmm13,%xmm13 vpshufb %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm14,%xmm14 jmp L$oop_ctr32 .globl _aesni_gcm_encrypt .private_extern _aesni_gcm_encrypt .p2align 5 _aesni_gcm_encrypt: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+2(%rip) #endif xorq %rax,%rax cmpq $288,%rdx jb L$gcm_enc_abort pushq %rbp movq %rsp,%rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 vzeroupper vmovdqu (%r8),%xmm1 addq $-128,%rsp movl 12(%r8),%ebx leaq L$bswap_mask(%rip),%r11 leaq -128(%rcx),%r14 movq $0xf80,%r15 leaq 128(%rcx),%rcx vmovdqu (%r11),%xmm0 andq $-128,%rsp movl 240-128(%rcx),%r10d andq %r15,%r14 andq %rsp,%r15 subq %r14,%r15 jc L$enc_no_key_aliasing cmpq $768,%r15 jnc L$enc_no_key_aliasing subq %r15,%rsp L$enc_no_key_aliasing: movq %rsi,%r14 leaq -192(%rsi,%rdx,1),%r15 shrq $4,%rdx call _aesni_ctr32_6x vpshufb %xmm0,%xmm9,%xmm8 vpshufb %xmm0,%xmm10,%xmm2 vmovdqu %xmm8,112(%rsp) vpshufb %xmm0,%xmm11,%xmm4 vmovdqu %xmm2,96(%rsp) vpshufb %xmm0,%xmm12,%xmm5 vmovdqu %xmm4,80(%rsp) vpshufb %xmm0,%xmm13,%xmm6 vmovdqu %xmm5,64(%rsp) vpshufb %xmm0,%xmm14,%xmm7 vmovdqu %xmm6,48(%rsp) call _aesni_ctr32_6x movq 16(%rbp),%r12 leaq 32(%r9),%r9 vmovdqu (%r12),%xmm8 subq $12,%rdx movq $192,%rax vpshufb %xmm0,%xmm8,%xmm8 call _aesni_ctr32_ghash_6x vmovdqu 32(%rsp),%xmm7 vmovdqu (%r11),%xmm0 vmovdqu 0-32(%r9),%xmm3 vpunpckhqdq %xmm7,%xmm7,%xmm1 vmovdqu 32-32(%r9),%xmm15 vmovups %xmm9,-96(%rsi) vpshufb %xmm0,%xmm9,%xmm9 vpxor %xmm7,%xmm1,%xmm1 vmovups %xmm10,-80(%rsi) vpshufb %xmm0,%xmm10,%xmm10 vmovups %xmm11,-64(%rsi) vpshufb %xmm0,%xmm11,%xmm11 vmovups %xmm12,-48(%rsi) vpshufb %xmm0,%xmm12,%xmm12 vmovups %xmm13,-32(%rsi) vpshufb %xmm0,%xmm13,%xmm13 vmovups %xmm14,-16(%rsi) vpshufb %xmm0,%xmm14,%xmm14 vmovdqu %xmm9,16(%rsp) vmovdqu 48(%rsp),%xmm6 vmovdqu 16-32(%r9),%xmm0 vpunpckhqdq %xmm6,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5 vpxor %xmm6,%xmm2,%xmm2 vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 vmovdqu 64(%rsp),%xmm9 vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4 vmovdqu 48-32(%r9),%xmm3 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm9,%xmm9,%xmm5 vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6 vpxor %xmm9,%xmm5,%xmm5 vpxor %xmm7,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 vmovdqu 80-32(%r9),%xmm15 vpxor %xmm1,%xmm2,%xmm2 vmovdqu 80(%rsp),%xmm1 vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7 vmovdqu 64-32(%r9),%xmm0 vpxor %xmm4,%xmm7,%xmm7 vpunpckhqdq %xmm1,%xmm1,%xmm4 vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpxor %xmm6,%xmm9,%xmm9 vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 96(%rsp),%xmm2 vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6 vmovdqu 96-32(%r9),%xmm3 vpxor %xmm7,%xmm6,%xmm6 vpunpckhqdq %xmm2,%xmm2,%xmm7 vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1 vpxor %xmm2,%xmm7,%xmm7 vpxor %xmm9,%xmm1,%xmm1 vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4 vmovdqu 128-32(%r9),%xmm15 vpxor %xmm5,%xmm4,%xmm4 vpxor 112(%rsp),%xmm8,%xmm8 vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5 vmovdqu 112-32(%r9),%xmm0 vpunpckhqdq %xmm8,%xmm8,%xmm9 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq 
$0x11,%xmm3,%xmm2,%xmm2 vpxor %xmm8,%xmm9,%xmm9 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7 vpxor %xmm4,%xmm7,%xmm4 vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6 vmovdqu 0-32(%r9),%xmm3 vpunpckhqdq %xmm14,%xmm14,%xmm1 vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8 vpxor %xmm14,%xmm1,%xmm1 vpxor %xmm5,%xmm6,%xmm5 vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9 vmovdqu 32-32(%r9),%xmm15 vpxor %xmm2,%xmm8,%xmm7 vpxor %xmm4,%xmm9,%xmm6 vmovdqu 16-32(%r9),%xmm0 vpxor %xmm5,%xmm7,%xmm9 vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4 vpxor %xmm9,%xmm6,%xmm6 vpunpckhqdq %xmm13,%xmm13,%xmm2 vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14 vpxor %xmm13,%xmm2,%xmm2 vpslldq $8,%xmm6,%xmm9 vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 vpxor %xmm9,%xmm5,%xmm8 vpsrldq $8,%xmm6,%xmm6 vpxor %xmm6,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5 vmovdqu 48-32(%r9),%xmm3 vpxor %xmm4,%xmm5,%xmm5 vpunpckhqdq %xmm12,%xmm12,%xmm9 vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13 vpxor %xmm12,%xmm9,%xmm9 vpxor %xmm14,%xmm13,%xmm13 vpalignr $8,%xmm8,%xmm8,%xmm14 vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 vmovdqu 80-32(%r9),%xmm15 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4 vmovdqu 64-32(%r9),%xmm0 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm11,%xmm11,%xmm1 vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12 vpxor %xmm11,%xmm1,%xmm1 vpxor %xmm13,%xmm12,%xmm12 vxorps 16(%rsp),%xmm7,%xmm7 vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9 vpxor %xmm2,%xmm9,%xmm9 vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 vxorps %xmm14,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5 vmovdqu 96-32(%r9),%xmm3 vpxor %xmm4,%xmm5,%xmm5 vpunpckhqdq %xmm10,%xmm10,%xmm2 vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11 vpxor %xmm10,%xmm2,%xmm2 vpalignr $8,%xmm8,%xmm8,%xmm14 vpxor %xmm12,%xmm11,%xmm11 vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1 vmovdqu 128-32(%r9),%xmm15 vpxor %xmm9,%xmm1,%xmm1 vxorps %xmm7,%xmm14,%xmm14 vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 vxorps %xmm14,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4 vmovdqu 112-32(%r9),%xmm0 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm8,%xmm8,%xmm9 vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10 vpxor %xmm8,%xmm9,%xmm9 vpxor %xmm11,%xmm10,%xmm10 vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5 vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7 vpxor %xmm4,%xmm5,%xmm5 vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6 vpxor %xmm10,%xmm7,%xmm7 vpxor %xmm2,%xmm6,%xmm6 vpxor %xmm5,%xmm7,%xmm4 vpxor %xmm4,%xmm6,%xmm6 vpslldq $8,%xmm6,%xmm1 vmovdqu 16(%r11),%xmm3 vpsrldq $8,%xmm6,%xmm6 vpxor %xmm1,%xmm5,%xmm8 vpxor %xmm6,%xmm7,%xmm7 vpalignr $8,%xmm8,%xmm8,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8 vpxor %xmm2,%xmm8,%xmm8 vpalignr $8,%xmm8,%xmm8,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8 vpxor %xmm7,%xmm2,%xmm2 vpxor %xmm2,%xmm8,%xmm8 movq 16(%rbp),%r12 vpshufb (%r11),%xmm8,%xmm8 vmovdqu %xmm8,(%r12) vzeroupper leaq -40(%rbp),%rsp popq %r15 popq %r14 popq %r13 popq %r12 popq %rbx popq %rbp L$gcm_enc_abort: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 L$bswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 L$poly: .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 L$one_msb: .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 L$two_lsb: .byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 L$one_lsb: .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 .byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .p2align 6 .text #endif
marvin-hansen/iggy-streaming-system
100,457
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/sha1-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _sha1_block_data_order_nohw .private_extern _sha1_block_data_order_nohw .p2align 4 _sha1_block_data_order_nohw: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 movq %rdi,%r8 subq $72,%rsp movq %rsi,%r9 andq $-64,%rsp movq %rdx,%r10 movq %rax,64(%rsp) L$prologue: movl 0(%r8),%esi movl 4(%r8),%edi movl 8(%r8),%r11d movl 12(%r8),%r12d movl 16(%r8),%r13d jmp L$loop .p2align 4 L$loop: movl 0(%r9),%edx bswapl %edx movl 4(%r9),%ebp movl %r12d,%eax movl %edx,0(%rsp) movl %esi,%ecx bswapl %ebp xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%rdx,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 8(%r9),%r14d movl %r11d,%eax movl %ebp,4(%rsp) movl %r13d,%ecx bswapl %r14d xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%rbp,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl %eax,%r12d movl 12(%r9),%edx movl %edi,%eax movl %r14d,8(%rsp) movl %r12d,%ecx bswapl %edx xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%r14,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d movl 16(%r9),%ebp movl %esi,%eax movl %edx,12(%rsp) movl %r11d,%ecx bswapl %ebp xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%rdx,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 20(%r9),%r14d movl %r13d,%eax movl %ebp,16(%rsp) movl %edi,%ecx bswapl %r14d xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%rbp,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi movl 24(%r9),%edx movl %r12d,%eax movl %r14d,20(%rsp) movl %esi,%ecx bswapl %edx xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%r14,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 28(%r9),%ebp movl %r11d,%eax movl %edx,24(%rsp) movl %r13d,%ecx bswapl %ebp xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%rdx,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl %eax,%r12d movl 32(%r9),%r14d movl %edi,%eax movl %ebp,28(%rsp) movl %r12d,%ecx bswapl %r14d xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%rbp,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d movl 36(%r9),%edx movl %esi,%eax movl %r14d,32(%rsp) movl %r11d,%ecx bswapl %edx xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%r14,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 40(%r9),%ebp movl %r13d,%eax movl %edx,36(%rsp) movl %edi,%ecx bswapl %ebp xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%rdx,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi movl 44(%r9),%r14d movl %r12d,%eax movl %ebp,40(%rsp) movl %esi,%ecx bswapl %r14d xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%rbp,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 48(%r9),%edx movl %r11d,%eax movl %r14d,44(%rsp) movl %r13d,%ecx bswapl %edx xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%r14,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl %eax,%r12d movl 52(%r9),%ebp movl %edi,%eax movl %edx,48(%rsp) movl %r12d,%ecx bswapl %ebp xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%rdx,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d 
movl 56(%r9),%r14d movl %esi,%eax movl %ebp,52(%rsp) movl %r11d,%ecx bswapl %r14d xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%rbp,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 60(%r9),%edx movl %r13d,%eax movl %r14d,56(%rsp) movl %edi,%ecx bswapl %edx xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%r14,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi xorl 0(%rsp),%ebp movl %r12d,%eax movl %edx,60(%rsp) movl %esi,%ecx xorl 8(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 32(%rsp),%ebp andl %edi,%eax leal 1518500249(%rdx,%r13,1),%r13d roll $30,%edi xorl %r12d,%eax addl %ecx,%r13d roll $1,%ebp addl %eax,%r13d xorl 4(%rsp),%r14d movl %r11d,%eax movl %ebp,0(%rsp) movl %r13d,%ecx xorl 12(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 36(%rsp),%r14d andl %esi,%eax leal 1518500249(%rbp,%r12,1),%r12d roll $30,%esi xorl %r11d,%eax addl %ecx,%r12d roll $1,%r14d addl %eax,%r12d xorl 8(%rsp),%edx movl %edi,%eax movl %r14d,4(%rsp) movl %r12d,%ecx xorl 16(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 40(%rsp),%edx andl %r13d,%eax leal 1518500249(%r14,%r11,1),%r11d roll $30,%r13d xorl %edi,%eax addl %ecx,%r11d roll $1,%edx addl %eax,%r11d xorl 12(%rsp),%ebp movl %esi,%eax movl %edx,8(%rsp) movl %r11d,%ecx xorl 20(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 44(%rsp),%ebp andl %r12d,%eax leal 1518500249(%rdx,%rdi,1),%edi roll $30,%r12d xorl %esi,%eax addl %ecx,%edi roll $1,%ebp addl %eax,%edi xorl 16(%rsp),%r14d movl %r13d,%eax movl %ebp,12(%rsp) movl %edi,%ecx xorl 24(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 48(%rsp),%r14d andl %r11d,%eax leal 1518500249(%rbp,%rsi,1),%esi roll $30,%r11d xorl %r13d,%eax addl %ecx,%esi roll $1,%r14d addl %eax,%esi xorl 20(%rsp),%edx movl %edi,%eax movl %r14d,16(%rsp) movl %esi,%ecx xorl 28(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 52(%rsp),%edx leal 1859775393(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%edx xorl 24(%rsp),%ebp movl %esi,%eax movl %edx,20(%rsp) movl %r13d,%ecx xorl 32(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 56(%rsp),%ebp leal 1859775393(%rdx,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 28(%rsp),%r14d movl %r13d,%eax movl %ebp,24(%rsp) movl %r12d,%ecx xorl 36(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 60(%rsp),%r14d leal 1859775393(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 32(%rsp),%edx movl %r12d,%eax movl %r14d,28(%rsp) movl %r11d,%ecx xorl 40(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 0(%rsp),%edx leal 1859775393(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 36(%rsp),%ebp movl %r11d,%eax movl %edx,32(%rsp) movl %edi,%ecx xorl 44(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 4(%rsp),%ebp leal 1859775393(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 40(%rsp),%r14d movl %edi,%eax movl %ebp,36(%rsp) movl %esi,%ecx xorl 48(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 8(%rsp),%r14d leal 1859775393(%rbp,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%r14d xorl 44(%rsp),%edx movl %esi,%eax movl %r14d,40(%rsp) movl %r13d,%ecx xorl 52(%rsp),%edx xorl %r11d,%eax roll $5,%ecx xorl 12(%rsp),%edx leal 1859775393(%r14,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%edx xorl 48(%rsp),%ebp movl %r13d,%eax movl %edx,44(%rsp) movl %r12d,%ecx xorl 
56(%rsp),%ebp xorl %edi,%eax roll $5,%ecx xorl 16(%rsp),%ebp leal 1859775393(%rdx,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%ebp xorl 52(%rsp),%r14d movl %r12d,%eax movl %ebp,48(%rsp) movl %r11d,%ecx xorl 60(%rsp),%r14d xorl %esi,%eax roll $5,%ecx xorl 20(%rsp),%r14d leal 1859775393(%rbp,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%r14d xorl 56(%rsp),%edx movl %r11d,%eax movl %r14d,52(%rsp) movl %edi,%ecx xorl 0(%rsp),%edx xorl %r13d,%eax roll $5,%ecx xorl 24(%rsp),%edx leal 1859775393(%r14,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%edx xorl 60(%rsp),%ebp movl %edi,%eax movl %edx,56(%rsp) movl %esi,%ecx xorl 4(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 28(%rsp),%ebp leal 1859775393(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 0(%rsp),%r14d movl %esi,%eax movl %ebp,60(%rsp) movl %r13d,%ecx xorl 8(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 32(%rsp),%r14d leal 1859775393(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 4(%rsp),%edx movl %r13d,%eax movl %r14d,0(%rsp) movl %r12d,%ecx xorl 12(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 36(%rsp),%edx leal 1859775393(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 8(%rsp),%ebp movl %r12d,%eax movl %edx,4(%rsp) movl %r11d,%ecx xorl 16(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 40(%rsp),%ebp leal 1859775393(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp xorl 12(%rsp),%r14d movl %r11d,%eax movl %ebp,8(%rsp) movl %edi,%ecx xorl 20(%rsp),%r14d xorl %r13d,%eax roll $5,%ecx xorl 44(%rsp),%r14d leal 1859775393(%rbp,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%r14d xorl 16(%rsp),%edx movl %edi,%eax movl %r14d,12(%rsp) movl %esi,%ecx xorl 24(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 48(%rsp),%edx leal 1859775393(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%edx xorl 20(%rsp),%ebp movl %esi,%eax movl %edx,16(%rsp) movl %r13d,%ecx xorl 28(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 52(%rsp),%ebp leal 1859775393(%rdx,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 24(%rsp),%r14d movl %r13d,%eax movl %ebp,20(%rsp) movl %r12d,%ecx xorl 32(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 56(%rsp),%r14d leal 1859775393(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 28(%rsp),%edx movl %r12d,%eax movl %r14d,24(%rsp) movl %r11d,%ecx xorl 36(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 60(%rsp),%edx leal 1859775393(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 32(%rsp),%ebp movl %r11d,%eax movl %edx,28(%rsp) movl %edi,%ecx xorl 40(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 0(%rsp),%ebp leal 1859775393(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 36(%rsp),%r14d movl %r12d,%eax movl %ebp,32(%rsp) movl %r12d,%ebx xorl 44(%rsp),%r14d andl %r11d,%eax movl %esi,%ecx xorl 4(%rsp),%r14d leal -1894007588(%rbp,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%r14d andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 40(%rsp),%edx movl %r11d,%eax movl %r14d,36(%rsp) movl %r11d,%ebx xorl 48(%rsp),%edx andl %edi,%eax movl %r13d,%ecx xorl 8(%rsp),%edx 
leal -1894007588(%r14,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%edx andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 44(%rsp),%ebp movl %edi,%eax movl %edx,40(%rsp) movl %edi,%ebx xorl 52(%rsp),%ebp andl %esi,%eax movl %r12d,%ecx xorl 12(%rsp),%ebp leal -1894007588(%rdx,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%ebp andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 48(%rsp),%r14d movl %esi,%eax movl %ebp,44(%rsp) movl %esi,%ebx xorl 56(%rsp),%r14d andl %r13d,%eax movl %r11d,%ecx xorl 16(%rsp),%r14d leal -1894007588(%rbp,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%r14d andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 52(%rsp),%edx movl %r13d,%eax movl %r14d,48(%rsp) movl %r13d,%ebx xorl 60(%rsp),%edx andl %r12d,%eax movl %edi,%ecx xorl 20(%rsp),%edx leal -1894007588(%r14,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%edx andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 56(%rsp),%ebp movl %r12d,%eax movl %edx,52(%rsp) movl %r12d,%ebx xorl 0(%rsp),%ebp andl %r11d,%eax movl %esi,%ecx xorl 24(%rsp),%ebp leal -1894007588(%rdx,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%ebp andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 60(%rsp),%r14d movl %r11d,%eax movl %ebp,56(%rsp) movl %r11d,%ebx xorl 4(%rsp),%r14d andl %edi,%eax movl %r13d,%ecx xorl 28(%rsp),%r14d leal -1894007588(%rbp,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%r14d andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 0(%rsp),%edx movl %edi,%eax movl %r14d,60(%rsp) movl %edi,%ebx xorl 8(%rsp),%edx andl %esi,%eax movl %r12d,%ecx xorl 32(%rsp),%edx leal -1894007588(%r14,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%edx andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 4(%rsp),%ebp movl %esi,%eax movl %edx,0(%rsp) movl %esi,%ebx xorl 12(%rsp),%ebp andl %r13d,%eax movl %r11d,%ecx xorl 36(%rsp),%ebp leal -1894007588(%rdx,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%ebp andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 8(%rsp),%r14d movl %r13d,%eax movl %ebp,4(%rsp) movl %r13d,%ebx xorl 16(%rsp),%r14d andl %r12d,%eax movl %edi,%ecx xorl 40(%rsp),%r14d leal -1894007588(%rbp,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%r14d andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 12(%rsp),%edx movl %r12d,%eax movl %r14d,8(%rsp) movl %r12d,%ebx xorl 20(%rsp),%edx andl %r11d,%eax movl %esi,%ecx xorl 44(%rsp),%edx leal -1894007588(%r14,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%edx andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 16(%rsp),%ebp movl %r11d,%eax movl %edx,12(%rsp) movl %r11d,%ebx xorl 24(%rsp),%ebp andl %edi,%eax movl %r13d,%ecx xorl 48(%rsp),%ebp leal -1894007588(%rdx,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%ebp andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 20(%rsp),%r14d movl %edi,%eax movl %ebp,16(%rsp) movl %edi,%ebx xorl 28(%rsp),%r14d andl %esi,%eax movl %r12d,%ecx xorl 52(%rsp),%r14d leal -1894007588(%rbp,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%r14d andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 24(%rsp),%edx movl %esi,%eax movl %r14d,20(%rsp) movl %esi,%ebx xorl 32(%rsp),%edx andl %r13d,%eax movl %r11d,%ecx xorl 56(%rsp),%edx leal 
-1894007588(%r14,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%edx andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 28(%rsp),%ebp movl %r13d,%eax movl %edx,24(%rsp) movl %r13d,%ebx xorl 36(%rsp),%ebp andl %r12d,%eax movl %edi,%ecx xorl 60(%rsp),%ebp leal -1894007588(%rdx,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%ebp andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 32(%rsp),%r14d movl %r12d,%eax movl %ebp,28(%rsp) movl %r12d,%ebx xorl 40(%rsp),%r14d andl %r11d,%eax movl %esi,%ecx xorl 0(%rsp),%r14d leal -1894007588(%rbp,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%r14d andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 36(%rsp),%edx movl %r11d,%eax movl %r14d,32(%rsp) movl %r11d,%ebx xorl 44(%rsp),%edx andl %edi,%eax movl %r13d,%ecx xorl 4(%rsp),%edx leal -1894007588(%r14,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%edx andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 40(%rsp),%ebp movl %edi,%eax movl %edx,36(%rsp) movl %edi,%ebx xorl 48(%rsp),%ebp andl %esi,%eax movl %r12d,%ecx xorl 8(%rsp),%ebp leal -1894007588(%rdx,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%ebp andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 44(%rsp),%r14d movl %esi,%eax movl %ebp,40(%rsp) movl %esi,%ebx xorl 52(%rsp),%r14d andl %r13d,%eax movl %r11d,%ecx xorl 12(%rsp),%r14d leal -1894007588(%rbp,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%r14d andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 48(%rsp),%edx movl %r13d,%eax movl %r14d,44(%rsp) movl %r13d,%ebx xorl 56(%rsp),%edx andl %r12d,%eax movl %edi,%ecx xorl 16(%rsp),%edx leal -1894007588(%r14,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%edx andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 52(%rsp),%ebp movl %edi,%eax movl %edx,48(%rsp) movl %esi,%ecx xorl 60(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 20(%rsp),%ebp leal -899497514(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 56(%rsp),%r14d movl %esi,%eax movl %ebp,52(%rsp) movl %r13d,%ecx xorl 0(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 24(%rsp),%r14d leal -899497514(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 60(%rsp),%edx movl %r13d,%eax movl %r14d,56(%rsp) movl %r12d,%ecx xorl 4(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 28(%rsp),%edx leal -899497514(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 0(%rsp),%ebp movl %r12d,%eax movl %edx,60(%rsp) movl %r11d,%ecx xorl 8(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 32(%rsp),%ebp leal -899497514(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp xorl 4(%rsp),%r14d movl %r11d,%eax movl %ebp,0(%rsp) movl %edi,%ecx xorl 12(%rsp),%r14d xorl %r13d,%eax roll $5,%ecx xorl 36(%rsp),%r14d leal -899497514(%rbp,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%r14d xorl 8(%rsp),%edx movl %edi,%eax movl %r14d,4(%rsp) movl %esi,%ecx xorl 16(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 40(%rsp),%edx leal -899497514(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%edx xorl 12(%rsp),%ebp movl %esi,%eax movl %edx,8(%rsp) movl %r13d,%ecx xorl 20(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 44(%rsp),%ebp leal -899497514(%rdx,%r12,1),%r12d 
xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 16(%rsp),%r14d movl %r13d,%eax movl %ebp,12(%rsp) movl %r12d,%ecx xorl 24(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 48(%rsp),%r14d leal -899497514(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 20(%rsp),%edx movl %r12d,%eax movl %r14d,16(%rsp) movl %r11d,%ecx xorl 28(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 52(%rsp),%edx leal -899497514(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 24(%rsp),%ebp movl %r11d,%eax movl %edx,20(%rsp) movl %edi,%ecx xorl 32(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 56(%rsp),%ebp leal -899497514(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 28(%rsp),%r14d movl %edi,%eax movl %ebp,24(%rsp) movl %esi,%ecx xorl 36(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 60(%rsp),%r14d leal -899497514(%rbp,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%r14d xorl 32(%rsp),%edx movl %esi,%eax movl %r14d,28(%rsp) movl %r13d,%ecx xorl 40(%rsp),%edx xorl %r11d,%eax roll $5,%ecx xorl 0(%rsp),%edx leal -899497514(%r14,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%edx xorl 36(%rsp),%ebp movl %r13d,%eax movl %r12d,%ecx xorl 44(%rsp),%ebp xorl %edi,%eax roll $5,%ecx xorl 4(%rsp),%ebp leal -899497514(%rdx,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%ebp xorl 40(%rsp),%r14d movl %r12d,%eax movl %r11d,%ecx xorl 48(%rsp),%r14d xorl %esi,%eax roll $5,%ecx xorl 8(%rsp),%r14d leal -899497514(%rbp,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%r14d xorl 44(%rsp),%edx movl %r11d,%eax movl %edi,%ecx xorl 52(%rsp),%edx xorl %r13d,%eax roll $5,%ecx xorl 12(%rsp),%edx leal -899497514(%r14,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%edx xorl 48(%rsp),%ebp movl %edi,%eax movl %esi,%ecx xorl 56(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 16(%rsp),%ebp leal -899497514(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 52(%rsp),%r14d movl %esi,%eax movl %r13d,%ecx xorl 60(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 20(%rsp),%r14d leal -899497514(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 56(%rsp),%edx movl %r13d,%eax movl %r12d,%ecx xorl 0(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 24(%rsp),%edx leal -899497514(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 60(%rsp),%ebp movl %r12d,%eax movl %r11d,%ecx xorl 4(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 28(%rsp),%ebp leal -899497514(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp movl %r11d,%eax movl %edi,%ecx xorl %r13d,%eax leal -899497514(%rbp,%rsi,1),%esi roll $5,%ecx xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi addl 0(%r8),%esi addl 4(%r8),%edi addl 8(%r8),%r11d addl 12(%r8),%r12d addl 16(%r8),%r13d movl %esi,0(%r8) movl %edi,4(%r8) movl %r11d,8(%r8) movl %r12d,12(%r8) movl %r13d,16(%r8) subq $1,%r10 leaq 64(%r9),%r9 jnz L$loop movq 64(%rsp),%rsi movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue: .byte 0xf3,0xc3 .globl _sha1_block_data_order_hw .private_extern _sha1_block_data_order_hw .p2align 5 _sha1_block_data_order_hw: _CET_ENDBR 
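/* Descriptive note: this entry point compresses blocks with the x86 SHA
   extensions (SHA-NI).  The .byte sequences below are hand-encoded
   instructions: 15,58,204,... is sha1rnds4, 15,56,200,... is sha1nexte,
   15,56,201,... is sha1msg1, 15,56,202,... is sha1msg2, and
   102,15,56,0,... is pshufb. */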
movdqu (%rdi),%xmm0 movd 16(%rdi),%xmm1 movdqa K_XX_XX+160(%rip),%xmm3 movdqu (%rsi),%xmm4 pshufd $27,%xmm0,%xmm0 movdqu 16(%rsi),%xmm5 pshufd $27,%xmm1,%xmm1 movdqu 32(%rsi),%xmm6 .byte 102,15,56,0,227 movdqu 48(%rsi),%xmm7 .byte 102,15,56,0,235 .byte 102,15,56,0,243 movdqa %xmm1,%xmm9 .byte 102,15,56,0,251 jmp L$oop_shaext .p2align 4 L$oop_shaext: decq %rdx leaq 64(%rsi),%r8 paddd %xmm4,%xmm1 cmovneq %r8,%rsi prefetcht0 512(%rsi) movdqa %xmm0,%xmm8 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,0 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,0 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,1 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,1 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,2 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,2 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 movdqu (%rsi),%xmm4 movdqa %xmm0,%xmm2 .byte 15,58,204,193,3 .byte 15,56,200,213 movdqu 16(%rsi),%xmm5 .byte 102,15,56,0,227 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 15,56,200,206 movdqu 32(%rsi),%xmm6 .byte 102,15,56,0,235 movdqa %xmm0,%xmm2 .byte 15,58,204,193,3 .byte 15,56,200,215 movdqu 48(%rsi),%xmm7 .byte 102,15,56,0,243 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 65,15,56,200,201 .byte 102,15,56,0,251 paddd %xmm8,%xmm0 movdqa %xmm1,%xmm9 jnz L$oop_shaext pshufd $27,%xmm0,%xmm0 pshufd $27,%xmm1,%xmm1 movdqu %xmm0,(%rdi) movd %xmm1,16(%rdi) .byte 0xf3,0xc3 .globl _sha1_block_data_order_ssse3 .private_extern _sha1_block_data_order_ssse3 .p2align 4 _sha1_block_data_order_ssse3: _CET_ENDBR movq %rsp,%r11 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 leaq -64(%rsp),%rsp andq $-64,%rsp movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 shlq $6,%r10 addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax movl 4(%r8),%ebx movl 8(%r8),%ecx movl 12(%r8),%edx movl %ebx,%esi movl 16(%r8),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi movdqa 64(%r14),%xmm6 movdqa -64(%r14),%xmm9 movdqu 0(%r9),%xmm0 movdqu 16(%r9),%xmm1 movdqu 32(%r9),%xmm2 movdqu 48(%r9),%xmm3 .byte 102,15,56,0,198 
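/* Descriptive note: the .byte 102,15,56,0,NN sequences here are pshufb
   with the byte-swap mask in %xmm6 (loaded from 64(%r14)), converting the
   big-endian message words of the loaded block to host order before the
   round constants are added. */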
.byte 102,15,56,0,206 .byte 102,15,56,0,214 addq $64,%r9 paddd %xmm9,%xmm0 .byte 102,15,56,0,222 paddd %xmm9,%xmm1 paddd %xmm9,%xmm2 movdqa %xmm0,0(%rsp) psubd %xmm9,%xmm0 movdqa %xmm1,16(%rsp) psubd %xmm9,%xmm1 movdqa %xmm2,32(%rsp) psubd %xmm9,%xmm2 jmp L$oop_ssse3 .p2align 4 L$oop_ssse3: rorl $2,%ebx pshufd $238,%xmm0,%xmm4 xorl %edx,%esi movdqa %xmm3,%xmm8 paddd %xmm3,%xmm9 movl %eax,%edi addl 0(%rsp),%ebp punpcklqdq %xmm1,%xmm4 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp psrldq $4,%xmm8 andl %ebx,%edi xorl %ecx,%ebx pxor %xmm0,%xmm4 addl %eax,%ebp rorl $7,%eax pxor %xmm2,%xmm8 xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx pxor %xmm8,%xmm4 xorl %ebx,%eax roll $5,%ebp movdqa %xmm9,48(%rsp) addl %edi,%edx andl %eax,%esi movdqa %xmm4,%xmm10 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp movdqa %xmm4,%xmm8 xorl %ebx,%esi pslldq $12,%xmm10 paddd %xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx psrld $31,%xmm8 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx movdqa %xmm10,%xmm9 andl %ebp,%edi xorl %eax,%ebp psrld $30,%xmm10 addl %edx,%ecx rorl $7,%edx por %xmm8,%xmm4 xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx pslld $2,%xmm9 pxor %xmm10,%xmm4 xorl %ebp,%edx movdqa -64(%r14),%xmm10 roll $5,%ecx addl %edi,%ebx andl %edx,%esi pxor %xmm9,%xmm4 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx pshufd $238,%xmm1,%xmm5 xorl %ebp,%esi movdqa %xmm4,%xmm9 paddd %xmm4,%xmm10 movl %ebx,%edi addl 16(%rsp),%eax punpcklqdq %xmm2,%xmm5 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax psrldq $4,%xmm9 andl %ecx,%edi xorl %edx,%ecx pxor %xmm1,%xmm5 addl %ebx,%eax rorl $7,%ebx pxor %xmm3,%xmm9 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp pxor %xmm9,%xmm5 xorl %ecx,%ebx roll $5,%eax movdqa %xmm10,0(%rsp) addl %edi,%ebp andl %ebx,%esi movdqa %xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax movdqa %xmm5,%xmm9 xorl %ecx,%esi pslldq $12,%xmm8 paddd %xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx psrld $31,%xmm9 xorl %ebx,%eax roll $5,%ebp addl %esi,%edx movdqa %xmm8,%xmm10 andl %eax,%edi xorl %ebx,%eax psrld $30,%xmm8 addl %ebp,%edx rorl $7,%ebp por %xmm9,%xmm5 xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx pslld $2,%xmm10 pxor %xmm8,%xmm5 xorl %eax,%ebp movdqa -32(%r14),%xmm8 roll $5,%edx addl %edi,%ecx andl %ebp,%esi pxor %xmm10,%xmm5 xorl %eax,%ebp addl %edx,%ecx rorl $7,%edx pshufd $238,%xmm2,%xmm6 xorl %eax,%esi movdqa %xmm5,%xmm10 paddd %xmm5,%xmm8 movl %ecx,%edi addl 32(%rsp),%ebx punpcklqdq %xmm3,%xmm6 xorl %ebp,%edx roll $5,%ecx addl %esi,%ebx psrldq $4,%xmm10 andl %edx,%edi xorl %ebp,%edx pxor %xmm2,%xmm6 addl %ecx,%ebx rorl $7,%ecx pxor %xmm4,%xmm10 xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax pxor %xmm10,%xmm6 xorl %edx,%ecx roll $5,%ebx movdqa %xmm8,16(%rsp) addl %edi,%eax andl %ecx,%esi movdqa %xmm6,%xmm9 xorl %edx,%ecx addl %ebx,%eax rorl $7,%ebx movdqa %xmm6,%xmm10 xorl %edx,%esi pslldq $12,%xmm9 paddd %xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp psrld $31,%xmm10 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp movdqa %xmm9,%xmm8 andl %ebx,%edi xorl %ecx,%ebx psrld $30,%xmm9 addl %eax,%ebp rorl $7,%eax por %xmm10,%xmm6 xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx pslld $2,%xmm8 pxor %xmm9,%xmm6 xorl %ebx,%eax movdqa -32(%r14),%xmm9 roll $5,%ebp addl %edi,%edx andl %eax,%esi pxor %xmm8,%xmm6 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp pshufd $238,%xmm3,%xmm7 xorl %ebx,%esi movdqa %xmm6,%xmm8 paddd %xmm6,%xmm9 movl %edx,%edi addl 48(%rsp),%ecx punpcklqdq %xmm4,%xmm7 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx psrldq $4,%xmm8 andl %ebp,%edi xorl %eax,%ebp pxor %xmm3,%xmm7 addl %edx,%ecx rorl $7,%edx pxor 
%xmm5,%xmm8 xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx pxor %xmm8,%xmm7 xorl %ebp,%edx roll $5,%ecx movdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi movdqa %xmm7,%xmm10 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx movdqa %xmm7,%xmm8 xorl %ebp,%esi pslldq $12,%xmm10 paddd %xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax psrld $31,%xmm8 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax movdqa %xmm10,%xmm9 andl %ecx,%edi xorl %edx,%ecx psrld $30,%xmm10 addl %ebx,%eax rorl $7,%ebx por %xmm8,%xmm7 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp pslld $2,%xmm9 pxor %xmm10,%xmm7 xorl %ecx,%ebx movdqa -32(%r14),%xmm10 roll $5,%eax addl %edi,%ebp andl %ebx,%esi pxor %xmm9,%xmm7 pshufd $238,%xmm6,%xmm9 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax pxor %xmm4,%xmm0 xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx punpcklqdq %xmm7,%xmm9 xorl %ebx,%eax roll $5,%ebp pxor %xmm1,%xmm0 addl %esi,%edx andl %eax,%edi movdqa %xmm10,%xmm8 xorl %ebx,%eax paddd %xmm7,%xmm10 addl %ebp,%edx pxor %xmm9,%xmm0 rorl $7,%ebp xorl %ebx,%edi movl %edx,%esi addl 4(%rsp),%ecx movdqa %xmm0,%xmm9 xorl %eax,%ebp roll $5,%edx movdqa %xmm10,48(%rsp) addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp pslld $2,%xmm0 addl %edx,%ecx rorl $7,%edx psrld $30,%xmm9 xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx por %xmm9,%xmm0 xorl %ebp,%edx roll $5,%ecx pshufd $238,%xmm7,%xmm10 addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax pxor %xmm5,%xmm1 addl 16(%rsp),%ebp xorl %ecx,%esi punpcklqdq %xmm0,%xmm10 movl %eax,%edi roll $5,%eax pxor %xmm2,%xmm1 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm8,%xmm9 rorl $7,%ebx paddd %xmm0,%xmm8 addl %eax,%ebp pxor %xmm10,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm1,%xmm10 addl %edi,%edx xorl %ebx,%esi movdqa %xmm8,0(%rsp) rorl $7,%eax addl %ebp,%edx addl 24(%rsp),%ecx pslld $2,%xmm1 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm10 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp por %xmm10,%xmm1 addl %edx,%ecx addl 28(%rsp),%ebx pshufd $238,%xmm0,%xmm8 xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx pxor %xmm6,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi punpcklqdq %xmm1,%xmm8 movl %ebx,%edi roll $5,%ebx pxor %xmm3,%xmm2 addl %esi,%eax xorl %edx,%edi movdqa 0(%r14),%xmm10 rorl $7,%ecx paddd %xmm1,%xmm9 addl %ebx,%eax pxor %xmm8,%xmm2 addl 36(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax movdqa %xmm2,%xmm8 addl %edi,%ebp xorl %ecx,%esi movdqa %xmm9,16(%rsp) rorl $7,%ebx addl %eax,%ebp addl 40(%rsp),%edx pslld $2,%xmm2 xorl %ebx,%esi movl %ebp,%edi psrld $30,%xmm8 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax por %xmm8,%xmm2 addl %ebp,%edx addl 44(%rsp),%ecx pshufd $238,%xmm1,%xmm9 xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx pxor %xmm7,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi punpcklqdq %xmm2,%xmm9 movl %ecx,%edi roll $5,%ecx pxor %xmm4,%xmm3 addl %esi,%ebx xorl %ebp,%edi movdqa %xmm10,%xmm8 rorl $7,%edx paddd %xmm2,%xmm10 addl %ecx,%ebx pxor %xmm9,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx movdqa %xmm3,%xmm9 addl %edi,%eax xorl %edx,%esi movdqa %xmm10,32(%rsp) rorl $7,%ecx addl %ebx,%eax addl 56(%rsp),%ebp pslld $2,%xmm3 xorl %ecx,%esi movl %eax,%edi psrld $30,%xmm9 roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx por %xmm9,%xmm3 addl %eax,%ebp addl 60(%rsp),%edx pshufd $238,%xmm2,%xmm10 xorl 
%ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx pxor %xmm0,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi punpcklqdq %xmm3,%xmm10 movl %edx,%edi roll $5,%edx pxor %xmm5,%xmm4 addl %esi,%ecx xorl %eax,%edi movdqa %xmm8,%xmm9 rorl $7,%ebp paddd %xmm3,%xmm8 addl %edx,%ecx pxor %xmm10,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx movdqa %xmm4,%xmm10 addl %edi,%ebx xorl %ebp,%esi movdqa %xmm8,48(%rsp) rorl $7,%edx addl %ecx,%ebx addl 8(%rsp),%eax pslld $2,%xmm4 xorl %edx,%esi movl %ebx,%edi psrld $30,%xmm10 roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx por %xmm10,%xmm4 addl %ebx,%eax addl 12(%rsp),%ebp pshufd $238,%xmm3,%xmm8 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp pxor %xmm1,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi punpcklqdq %xmm4,%xmm8 movl %ebp,%edi roll $5,%ebp pxor %xmm6,%xmm5 addl %esi,%edx xorl %ebx,%edi movdqa %xmm9,%xmm10 rorl $7,%eax paddd %xmm4,%xmm9 addl %ebp,%edx pxor %xmm8,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx movdqa %xmm5,%xmm8 addl %edi,%ecx xorl %eax,%esi movdqa %xmm9,0(%rsp) rorl $7,%ebp addl %edx,%ecx addl 24(%rsp),%ebx pslld $2,%xmm5 xorl %ebp,%esi movl %ecx,%edi psrld $30,%xmm8 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx por %xmm8,%xmm5 addl %ecx,%ebx addl 28(%rsp),%eax pshufd $238,%xmm4,%xmm9 rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax pxor %xmm2,%xmm6 addl 32(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx punpcklqdq %xmm5,%xmm9 movl %eax,%edi xorl %ecx,%esi pxor %xmm7,%xmm6 roll $5,%eax addl %esi,%ebp movdqa %xmm10,%xmm8 xorl %ebx,%edi paddd %xmm5,%xmm10 xorl %ecx,%ebx pxor %xmm9,%xmm6 addl %eax,%ebp addl 36(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movdqa %xmm6,%xmm9 movl %ebp,%esi xorl %ebx,%edi movdqa %xmm10,16(%rsp) roll $5,%ebp addl %edi,%edx xorl %eax,%esi pslld $2,%xmm6 xorl %ebx,%eax addl %ebp,%edx psrld $30,%xmm9 addl 40(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax por %xmm9,%xmm6 rorl $7,%ebp movl %edx,%edi xorl %eax,%esi roll $5,%edx pshufd $238,%xmm5,%xmm10 addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movl %ecx,%esi xorl %ebp,%edi roll $5,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx pxor %xmm3,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx rorl $7,%ecx punpcklqdq %xmm6,%xmm10 movl %ebx,%edi xorl %edx,%esi pxor %xmm0,%xmm7 roll $5,%ebx addl %esi,%eax movdqa 32(%r14),%xmm9 xorl %ecx,%edi paddd %xmm6,%xmm8 xorl %edx,%ecx pxor %xmm10,%xmm7 addl %ebx,%eax addl 52(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movdqa %xmm7,%xmm10 movl %eax,%esi xorl %ecx,%edi movdqa %xmm8,32(%rsp) roll $5,%eax addl %edi,%ebp xorl %ebx,%esi pslld $2,%xmm7 xorl %ecx,%ebx addl %eax,%ebp psrld $30,%xmm10 addl 56(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx por %xmm10,%xmm7 rorl $7,%eax movl %ebp,%edi xorl %ebx,%esi roll $5,%ebp pshufd $238,%xmm6,%xmm8 addl %esi,%edx xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movl %edx,%esi xorl %eax,%edi roll $5,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx pxor %xmm4,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp rorl $7,%edx punpcklqdq %xmm7,%xmm8 movl %ecx,%edi xorl %ebp,%esi pxor %xmm1,%xmm0 roll $5,%ecx addl %esi,%ebx movdqa %xmm9,%xmm10 xorl %edx,%edi paddd %xmm7,%xmm9 xorl %ebp,%edx pxor 
%xmm8,%xmm0 addl %ecx,%ebx addl 4(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movdqa %xmm0,%xmm8 movl %ebx,%esi xorl %edx,%edi movdqa %xmm9,48(%rsp) roll $5,%ebx addl %edi,%eax xorl %ecx,%esi pslld $2,%xmm0 xorl %edx,%ecx addl %ebx,%eax psrld $30,%xmm8 addl 8(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx por %xmm8,%xmm0 rorl $7,%ebx movl %eax,%edi xorl %ecx,%esi roll $5,%eax pshufd $238,%xmm7,%xmm9 addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movl %ebp,%esi xorl %ebx,%edi roll $5,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx pxor %xmm5,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax rorl $7,%ebp punpcklqdq %xmm0,%xmm9 movl %edx,%edi xorl %eax,%esi pxor %xmm2,%xmm1 roll $5,%edx addl %esi,%ecx movdqa %xmm10,%xmm8 xorl %ebp,%edi paddd %xmm0,%xmm10 xorl %eax,%ebp pxor %xmm9,%xmm1 addl %edx,%ecx addl 20(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movdqa %xmm1,%xmm9 movl %ecx,%esi xorl %ebp,%edi movdqa %xmm10,0(%rsp) roll $5,%ecx addl %edi,%ebx xorl %edx,%esi pslld $2,%xmm1 xorl %ebp,%edx addl %ecx,%ebx psrld $30,%xmm9 addl 24(%rsp),%eax andl %edx,%esi xorl %ebp,%edx por %xmm9,%xmm1 rorl $7,%ecx movl %ebx,%edi xorl %edx,%esi roll $5,%ebx pshufd $238,%xmm0,%xmm10 addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%edi roll $5,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp pxor %xmm6,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax punpcklqdq %xmm1,%xmm10 movl %ebp,%edi xorl %ebx,%esi pxor %xmm3,%xmm2 roll $5,%ebp addl %esi,%edx movdqa %xmm8,%xmm9 xorl %eax,%edi paddd %xmm1,%xmm8 xorl %ebx,%eax pxor %xmm10,%xmm2 addl %ebp,%edx addl 36(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movdqa %xmm2,%xmm10 movl %edx,%esi xorl %eax,%edi movdqa %xmm8,16(%rsp) roll $5,%edx addl %edi,%ecx xorl %ebp,%esi pslld $2,%xmm2 xorl %eax,%ebp addl %edx,%ecx psrld $30,%xmm10 addl 40(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp por %xmm10,%xmm2 rorl $7,%edx movl %ecx,%edi xorl %ebp,%esi roll $5,%ecx pshufd $238,%xmm1,%xmm8 addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax pxor %xmm7,%xmm3 addl 48(%rsp),%ebp xorl %ecx,%esi punpcklqdq %xmm2,%xmm8 movl %eax,%edi roll $5,%eax pxor %xmm4,%xmm3 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm9,%xmm10 rorl $7,%ebx paddd %xmm2,%xmm9 addl %eax,%ebp pxor %xmm8,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm3,%xmm8 addl %edi,%edx xorl %ebx,%esi movdqa %xmm9,32(%rsp) rorl $7,%eax addl %ebp,%edx addl 56(%rsp),%ecx pslld $2,%xmm3 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm8 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp por %xmm8,%xmm3 addl %edx,%ecx addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 0(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx paddd %xmm3,%xmm10 addl %esi,%eax xorl %edx,%edi movdqa %xmm10,48(%rsp) rorl $7,%ecx addl %ebx,%eax addl 4(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl 
%eax,%esi rorl $7,%ebp addl %edx,%ecx cmpq %r10,%r9 je L$done_ssse3 movdqa 64(%r14),%xmm6 movdqa -64(%r14),%xmm9 movdqu 0(%r9),%xmm0 movdqu 16(%r9),%xmm1 movdqu 32(%r9),%xmm2 movdqu 48(%r9),%xmm3 .byte 102,15,56,0,198 addq $64,%r9 addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi .byte 102,15,56,0,206 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx paddd %xmm9,%xmm0 addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi movdqa %xmm0,0(%rsp) roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx psubd %xmm9,%xmm0 addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi .byte 102,15,56,0,214 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp paddd %xmm9,%xmm1 addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi movdqa %xmm1,16(%rsp) roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx psubd %xmm9,%xmm1 addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi .byte 102,15,56,0,222 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax paddd %xmm9,%xmm2 addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi movdqa %xmm2,32(%rsp) roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp psubd %xmm9,%xmm2 addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx addl 12(%r8),%edx movl %eax,0(%r8) addl 16(%r8),%ebp movl %esi,4(%r8) movl %esi,%ebx movl %ecx,8(%r8) movl %ecx,%edi movl %edx,12(%r8) xorl %edx,%edi movl %ebp,16(%r8) andl %edi,%esi jmp L$oop_ssse3 .p2align 4 L$done_ssse3: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx 
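/* Descriptive note: these are the last rounds of the final block in the
   SSSE3 path; just below, the working values are added back into the hash
   state at (%r8), the callee-saved registers are restored and the
   function returns. */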
addl %edi,%eax rorl $7,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx movl %eax,0(%r8) addl 12(%r8),%edx movl %esi,4(%r8) addl 16(%r8),%ebp movl %ecx,8(%r8) movl %edx,12(%r8) movl %ebp,16(%r8) movq -40(%r11),%r14 movq -32(%r11),%r13 movq -24(%r11),%r12 movq -16(%r11),%rbp movq -8(%r11),%rbx leaq (%r11),%rsp L$epilogue_ssse3: .byte 0xf3,0xc3 .globl _sha1_block_data_order_avx .private_extern _sha1_block_data_order_avx .p2align 4 _sha1_block_data_order_avx: _CET_ENDBR movq %rsp,%r11 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 leaq -64(%rsp),%rsp vzeroupper andq $-64,%rsp movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 shlq $6,%r10 addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax movl 4(%r8),%ebx movl 8(%r8),%ecx movl 12(%r8),%edx movl %ebx,%esi movl 16(%r8),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi vmovdqa 64(%r14),%xmm6 vmovdqa -64(%r14),%xmm11 vmovdqu 0(%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r9 vpshufb %xmm6,%xmm1,%xmm1 vpshufb %xmm6,%xmm2,%xmm2 vpshufb %xmm6,%xmm3,%xmm3 vpaddd %xmm11,%xmm0,%xmm4 vpaddd %xmm11,%xmm1,%xmm5 vpaddd %xmm11,%xmm2,%xmm6 vmovdqa %xmm4,0(%rsp) vmovdqa %xmm5,16(%rsp) vmovdqa %xmm6,32(%rsp) jmp L$oop_avx .p2align 4 L$oop_avx: shrdl $2,%ebx,%ebx xorl %edx,%esi vpalignr $8,%xmm0,%xmm1,%xmm4 movl %eax,%edi addl 0(%rsp),%ebp vpaddd %xmm3,%xmm11,%xmm9 xorl %ecx,%ebx shldl $5,%eax,%eax vpsrldq $4,%xmm3,%xmm8 addl %esi,%ebp andl %ebx,%edi vpxor %xmm0,%xmm4,%xmm4 xorl %ecx,%ebx addl %eax,%ebp vpxor %xmm2,%xmm8,%xmm8 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx vpxor %xmm8,%xmm4,%xmm4 xorl %ebx,%eax shldl $5,%ebp,%ebp vmovdqa %xmm9,48(%rsp) addl %edi,%edx andl %eax,%esi vpsrld $31,%xmm4,%xmm8 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpslldq $12,%xmm4,%xmm10 vpaddd %xmm4,%xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm4,%xmm4 addl %esi,%ecx andl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm4,%xmm4 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx vpxor %xmm10,%xmm4,%xmm4 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %edi,%ebx andl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpalignr $8,%xmm1,%xmm2,%xmm5 movl %ebx,%edi addl 16(%rsp),%eax vpaddd %xmm4,%xmm11,%xmm9 xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrldq $4,%xmm4,%xmm8 addl %esi,%eax andl %ecx,%edi vpxor %xmm1,%xmm5,%xmm5 xorl %edx,%ecx addl %ebx,%eax vpxor %xmm3,%xmm8,%xmm8 shrdl $7,%ebx,%ebx xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp vpxor %xmm8,%xmm5,%xmm5 xorl %ecx,%ebx shldl $5,%eax,%eax vmovdqa %xmm9,0(%rsp) addl %edi,%ebp andl %ebx,%esi vpsrld $31,%xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp shrdl $7,%eax,%eax xorl %ecx,%esi vpslldq $12,%xmm5,%xmm10 vpaddd %xmm5,%xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx xorl %ebx,%eax shldl $5,%ebp,%ebp vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm5,%xmm5 addl %esi,%edx andl %eax,%edi xorl %ebx,%eax addl %ebp,%edx vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm5,%xmm5 shrdl $7,%ebp,%ebp xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx vpxor %xmm10,%xmm5,%xmm5 xorl %eax,%ebp shldl $5,%edx,%edx vmovdqa -32(%r14),%xmm11 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi vpalignr $8,%xmm2,%xmm3,%xmm6 movl %ecx,%edi addl 32(%rsp),%ebx vpaddd %xmm5,%xmm11,%xmm9 xorl %ebp,%edx shldl $5,%ecx,%ecx vpsrldq $4,%xmm5,%xmm8 addl %esi,%ebx andl %edx,%edi 
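/* Descriptive note: the AVX path entered above mirrors the SSSE3 path,
   using VEX three-operand forms (vpxor, vpalignr, vpaddd, ...) for the
   message schedule and shldl/shrdl in place of roll/rorl for the
   rotates. */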
vpxor %xmm2,%xmm6,%xmm6 xorl %ebp,%edx addl %ecx,%ebx vpxor %xmm4,%xmm8,%xmm8 shrdl $7,%ecx,%ecx xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax vpxor %xmm8,%xmm6,%xmm6 xorl %edx,%ecx shldl $5,%ebx,%ebx vmovdqa %xmm9,16(%rsp) addl %edi,%eax andl %ecx,%esi vpsrld $31,%xmm6,%xmm8 xorl %edx,%ecx addl %ebx,%eax shrdl $7,%ebx,%ebx xorl %edx,%esi vpslldq $12,%xmm6,%xmm10 vpaddd %xmm6,%xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp xorl %ecx,%ebx shldl $5,%eax,%eax vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm6,%xmm6 addl %esi,%ebp andl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm6,%xmm6 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx vpxor %xmm10,%xmm6,%xmm6 xorl %ebx,%eax shldl $5,%ebp,%ebp addl %edi,%edx andl %eax,%esi xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpalignr $8,%xmm3,%xmm4,%xmm7 movl %edx,%edi addl 48(%rsp),%ecx vpaddd %xmm6,%xmm11,%xmm9 xorl %eax,%ebp shldl $5,%edx,%edx vpsrldq $4,%xmm6,%xmm8 addl %esi,%ecx andl %ebp,%edi vpxor %xmm3,%xmm7,%xmm7 xorl %eax,%ebp addl %edx,%ecx vpxor %xmm5,%xmm8,%xmm8 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx vpxor %xmm8,%xmm7,%xmm7 xorl %ebp,%edx shldl $5,%ecx,%ecx vmovdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi vpsrld $31,%xmm7,%xmm8 xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpslldq $12,%xmm7,%xmm10 vpaddd %xmm7,%xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm7,%xmm7 addl %esi,%eax andl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm7,%xmm7 shrdl $7,%ebx,%ebx xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp vpxor %xmm10,%xmm7,%xmm7 xorl %ecx,%ebx shldl $5,%eax,%eax addl %edi,%ebp andl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx vpxor %xmm1,%xmm0,%xmm0 xorl %ebx,%eax shldl $5,%ebp,%ebp vpaddd %xmm7,%xmm11,%xmm9 addl %esi,%edx andl %eax,%edi vpxor %xmm8,%xmm0,%xmm0 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%edi vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) movl %edx,%esi addl 4(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpslld $2,%xmm0,%xmm0 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx vpor %xmm8,%xmm0,%xmm0 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm2,%xmm1,%xmm1 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm0,%xmm11,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm1,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm1,%xmm1 addl 24(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm1,%xmm1 addl 28(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx vpxor %xmm3,%xmm2,%xmm2 addl 
%esi,%eax xorl %edx,%edi vpaddd %xmm1,%xmm11,%xmm9 vmovdqa 0(%r14),%xmm11 shrdl $7,%ecx,%ecx addl %ebx,%eax vpxor %xmm8,%xmm2,%xmm2 addl 36(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpslld $2,%xmm2,%xmm2 addl 40(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vpor %xmm8,%xmm2,%xmm2 addl 44(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebx xorl %ebp,%edi vpaddd %xmm2,%xmm11,%xmm9 shrdl $7,%edx,%edx addl %ecx,%ebx vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpalignr $8,%xmm2,%xmm3,%xmm8 vpxor %xmm0,%xmm4,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx vpxor %xmm5,%xmm4,%xmm4 addl %esi,%ecx xorl %eax,%edi vpaddd %xmm3,%xmm11,%xmm9 shrdl $7,%ebp,%ebp addl %edx,%ecx vpxor %xmm8,%xmm4,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx vpsrld $30,%xmm4,%xmm8 vmovdqa %xmm9,48(%rsp) addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpslld $2,%xmm4,%xmm4 addl 8(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax vpor %xmm8,%xmm4,%xmm4 addl 12(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpalignr $8,%xmm3,%xmm4,%xmm8 vpxor %xmm1,%xmm5,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp vpxor %xmm6,%xmm5,%xmm5 addl %esi,%edx xorl %ebx,%edi vpaddd %xmm4,%xmm11,%xmm9 shrdl $7,%eax,%eax addl %ebp,%edx vpxor %xmm8,%xmm5,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx vpsrld $30,%xmm5,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpslld $2,%xmm5,%xmm5 addl 24(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vpor %xmm8,%xmm5,%xmm5 addl 28(%rsp),%eax shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax vpalignr $8,%xmm4,%xmm5,%xmm8 vpxor %xmm2,%xmm6,%xmm6 addl 32(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx shrdl $7,%ebx,%ebx vpxor %xmm7,%xmm6,%xmm6 movl %eax,%edi xorl %ecx,%esi vpaddd %xmm5,%xmm11,%xmm9 shldl $5,%eax,%eax addl %esi,%ebp vpxor %xmm8,%xmm6,%xmm6 xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 36(%rsp),%edx vpsrld $30,%xmm6,%xmm8 vmovdqa %xmm9,16(%rsp) andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi vpslld $2,%xmm6,%xmm6 xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx addl 40(%rsp),%ecx andl %eax,%esi vpor %xmm8,%xmm6,%xmm6 xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%edi xorl %eax,%esi shldl 
$5,%edx,%edx addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%esi xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx vpalignr $8,%xmm5,%xmm6,%xmm8 vpxor %xmm3,%xmm7,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx shrdl $7,%ecx,%ecx vpxor %xmm0,%xmm7,%xmm7 movl %ebx,%edi xorl %edx,%esi vpaddd %xmm6,%xmm11,%xmm9 vmovdqa 32(%r14),%xmm11 shldl $5,%ebx,%ebx addl %esi,%eax vpxor %xmm8,%xmm7,%xmm7 xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 52(%rsp),%ebp vpsrld $30,%xmm7,%xmm8 vmovdqa %xmm9,32(%rsp) andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi vpslld $2,%xmm7,%xmm7 xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp addl 56(%rsp),%edx andl %ebx,%esi vpor %xmm8,%xmm7,%xmm7 xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%edi xorl %ebx,%esi shldl $5,%ebp,%ebp addl %esi,%edx xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp shrdl $7,%edx,%edx vpxor %xmm1,%xmm0,%xmm0 movl %ecx,%edi xorl %ebp,%esi vpaddd %xmm7,%xmm11,%xmm9 shldl $5,%ecx,%ecx addl %esi,%ebx vpxor %xmm8,%xmm0,%xmm0 xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 4(%rsp),%eax vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi vpslld $2,%xmm0,%xmm0 xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 8(%rsp),%ebp andl %ecx,%esi vpor %xmm8,%xmm0,%xmm0 xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%edi xorl %ecx,%esi shldl $5,%eax,%eax addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax shrdl $7,%ebp,%ebp vpxor %xmm2,%xmm1,%xmm1 movl %edx,%edi xorl %eax,%esi vpaddd %xmm0,%xmm11,%xmm9 shldl $5,%edx,%edx addl %esi,%ecx vpxor %xmm8,%xmm1,%xmm1 xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 20(%rsp),%ebx vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%esi vpslld $2,%xmm1,%xmm1 xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx addl 24(%rsp),%eax andl %edx,%esi vpor %xmm8,%xmm1,%xmm1 xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%edi xorl %edx,%esi shldl $5,%ebx,%ebx addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx shrdl $7,%eax,%eax vpxor %xmm3,%xmm2,%xmm2 movl %ebp,%edi xorl %ebx,%esi vpaddd %xmm1,%xmm11,%xmm9 shldl $5,%ebp,%ebp addl %esi,%edx vpxor %xmm8,%xmm2,%xmm2 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 36(%rsp),%ecx vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi vpslld $2,%xmm2,%xmm2 xorl %eax,%edi shldl $5,%edx,%edx addl 
%edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx addl 40(%rsp),%ebx andl %ebp,%esi vpor %xmm8,%xmm2,%xmm2 xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%edi xorl %ebp,%esi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm2,%xmm11,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 0(%rsp),%eax vpaddd %xmm3,%xmm11,%xmm9 xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax vmovdqa %xmm9,48(%rsp) xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 4(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx cmpq %r10,%r9 je L$done_avx vmovdqa 64(%r14),%xmm6 vmovdqa -64(%r14),%xmm11 vmovdqu 0(%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r9 addl 16(%rsp),%ebx xorl %ebp,%esi vpshufb %xmm6,%xmm1,%xmm1 movl %ecx,%edi shldl $5,%ecx,%ecx vpaddd %xmm11,%xmm0,%xmm4 addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vmovdqa %xmm4,0(%rsp) addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi vpshufb %xmm6,%xmm2,%xmm2 movl %edx,%edi shldl $5,%edx,%edx vpaddd %xmm11,%xmm1,%xmm5 addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vmovdqa %xmm5,16(%rsp) addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi vpshufb %xmm6,%xmm3,%xmm3 movl %ebp,%edi shldl $5,%ebp,%ebp vpaddd %xmm11,%xmm2,%xmm6 addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vmovdqa %xmm6,32(%rsp) addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx 
addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx addl 12(%r8),%edx movl %eax,0(%r8) addl 16(%r8),%ebp movl %esi,4(%r8) movl %esi,%ebx movl %ecx,8(%r8) movl %ecx,%edi movl %edx,12(%r8) xorl %edx,%edi movl %ebp,16(%r8) andl %edi,%esi jmp L$oop_avx .p2align 4 L$done_avx: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vzeroupper addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx movl %eax,0(%r8) addl 12(%r8),%edx movl %esi,4(%r8) addl 16(%r8),%ebp movl %ecx,8(%r8) movl %edx,12(%r8) movl %ebp,16(%r8) movq -40(%r11),%r14 movq -32(%r11),%r13 movq -24(%r11),%r12 movq -16(%r11),%rbp movq -8(%r11),%rbx leaq (%r11),%rsp L$epilogue_avx: .byte 0xf3,0xc3 .globl _sha1_block_data_order_avx2 .private_extern _sha1_block_data_order_avx2 .p2align 4 _sha1_block_data_order_avx2: _CET_ENDBR movq %rsp,%r11 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 vzeroupper movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 leaq -640(%rsp),%rsp shlq $6,%r10 leaq 64(%r9),%r13 andq $-128,%rsp addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax cmpq %r10,%r13 cmovaeq %r9,%r13 movl 4(%r8),%ebp movl 8(%r8),%ecx movl 12(%r8),%edx movl 16(%r8),%esi vmovdqu 64(%r14),%ymm6 vmovdqu (%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 leaq 64(%r9),%r9 vinserti128 $1,(%r13),%ymm0,%ymm0 vinserti128 $1,16(%r13),%ymm1,%ymm1 vpshufb %ymm6,%ymm0,%ymm0 vinserti128 $1,32(%r13),%ymm2,%ymm2 vpshufb %ymm6,%ymm1,%ymm1 vinserti128 $1,48(%r13),%ymm3,%ymm3 vpshufb %ymm6,%ymm2,%ymm2 vmovdqu -64(%r14),%ymm11 vpshufb %ymm6,%ymm3,%ymm3 vpaddd %ymm11,%ymm0,%ymm4 vpaddd %ymm11,%ymm1,%ymm5 vmovdqu %ymm4,0(%rsp) vpaddd %ymm11,%ymm2,%ymm6 vmovdqu %ymm5,32(%rsp) vpaddd %ymm11,%ymm3,%ymm7 vmovdqu %ymm6,64(%rsp) vmovdqu %ymm7,96(%rsp) vpalignr $8,%ymm0,%ymm1,%ymm4 vpsrldq $4,%ymm3,%ymm8 vpxor %ymm0,%ymm4,%ymm4 vpxor %ymm2,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 
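/* Descriptive note: the AVX2 path processes two 64-byte blocks per
   iteration.  The vinserti128 instructions above pack one block into each
   128-bit lane of the ymm registers, the pre-expanded schedule lives in
   the 640-byte stack frame, and the rounds use the BMI1/BMI2 andnl/rorxl
   instructions. */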
vpsrld $31,%ymm4,%ymm8 vpslldq $12,%ymm4,%ymm10 vpaddd %ymm4,%ymm4,%ymm4 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm4,%ymm4 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm4,%ymm4 vpxor %ymm10,%ymm4,%ymm4 vpaddd %ymm11,%ymm4,%ymm9 vmovdqu %ymm9,128(%rsp) vpalignr $8,%ymm1,%ymm2,%ymm5 vpsrldq $4,%ymm4,%ymm8 vpxor %ymm1,%ymm5,%ymm5 vpxor %ymm3,%ymm8,%ymm8 vpxor %ymm8,%ymm5,%ymm5 vpsrld $31,%ymm5,%ymm8 vmovdqu -32(%r14),%ymm11 vpslldq $12,%ymm5,%ymm10 vpaddd %ymm5,%ymm5,%ymm5 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm5,%ymm5 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm10,%ymm5,%ymm5 vpaddd %ymm11,%ymm5,%ymm9 vmovdqu %ymm9,160(%rsp) vpalignr $8,%ymm2,%ymm3,%ymm6 vpsrldq $4,%ymm5,%ymm8 vpxor %ymm2,%ymm6,%ymm6 vpxor %ymm4,%ymm8,%ymm8 vpxor %ymm8,%ymm6,%ymm6 vpsrld $31,%ymm6,%ymm8 vpslldq $12,%ymm6,%ymm10 vpaddd %ymm6,%ymm6,%ymm6 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm6,%ymm6 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm6,%ymm6 vpxor %ymm10,%ymm6,%ymm6 vpaddd %ymm11,%ymm6,%ymm9 vmovdqu %ymm9,192(%rsp) vpalignr $8,%ymm3,%ymm4,%ymm7 vpsrldq $4,%ymm6,%ymm8 vpxor %ymm3,%ymm7,%ymm7 vpxor %ymm5,%ymm8,%ymm8 vpxor %ymm8,%ymm7,%ymm7 vpsrld $31,%ymm7,%ymm8 vpslldq $12,%ymm7,%ymm10 vpaddd %ymm7,%ymm7,%ymm7 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm7,%ymm7 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm7,%ymm7 vpxor %ymm10,%ymm7,%ymm7 vpaddd %ymm11,%ymm7,%ymm9 vmovdqu %ymm9,224(%rsp) leaq 128(%rsp),%r13 jmp L$oop_avx2 .p2align 5 L$oop_avx2: rorxl $2,%ebp,%ebx andnl %edx,%ebp,%edi andl %ecx,%ebp xorl %edi,%ebp jmp L$align32_1 .p2align 5 L$align32_1: vpalignr $8,%ymm6,%ymm7,%ymm8 vpxor %ymm4,%ymm0,%ymm0 addl -128(%r13),%esi andnl %ecx,%eax,%edi vpxor %ymm1,%ymm0,%ymm0 addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpxor %ymm8,%ymm0,%ymm0 andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax vpsrld $30,%ymm0,%ymm8 vpslld $2,%ymm0,%ymm0 addl -124(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi vpor %ymm8,%ymm0,%ymm0 addl %r12d,%edx xorl %edi,%esi addl -120(%r13),%ecx andnl %ebp,%edx,%edi vpaddd %ymm11,%ymm0,%ymm9 addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx vmovdqu %ymm9,256(%rsp) addl %r12d,%ecx xorl %edi,%edx addl -116(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -96(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx vpalignr $8,%ymm7,%ymm0,%ymm8 vpxor %ymm5,%ymm1,%ymm1 addl -92(%r13),%eax andnl %edx,%ebp,%edi vpxor %ymm2,%ymm1,%ymm1 addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx vpxor %ymm8,%ymm1,%ymm1 andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp vpsrld $30,%ymm1,%ymm8 vpslld $2,%ymm1,%ymm1 addl -88(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax vpor %ymm8,%ymm1,%ymm1 addl %r12d,%esi xorl %edi,%eax addl -84(%r13),%edx andnl %ebx,%esi,%edi vpaddd %ymm11,%ymm1,%ymm9 addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi vmovdqu %ymm9,288(%rsp) addl %r12d,%edx xorl %edi,%esi addl -64(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -60(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx vpalignr $8,%ymm0,%ymm1,%ymm8 vpxor %ymm6,%ymm2,%ymm2 addl -56(%r13),%ebp andnl %esi,%ebx,%edi vpxor %ymm3,%ymm2,%ymm2 vmovdqu 0(%r14),%ymm11 addl 
%ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx vpxor %ymm8,%ymm2,%ymm2 andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx vpsrld $30,%ymm2,%ymm8 vpslld $2,%ymm2,%ymm2 addl -52(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp vpor %ymm8,%ymm2,%ymm2 addl %r12d,%eax xorl %edi,%ebp addl -32(%r13),%esi andnl %ecx,%eax,%edi vpaddd %ymm11,%ymm2,%ymm9 addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax vmovdqu %ymm9,320(%rsp) addl %r12d,%esi xorl %edi,%eax addl -28(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -24(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx vpalignr $8,%ymm1,%ymm2,%ymm8 vpxor %ymm7,%ymm3,%ymm3 addl -20(%r13),%ebx andnl %eax,%ecx,%edi vpxor %ymm4,%ymm3,%ymm3 addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpxor %ymm8,%ymm3,%ymm3 andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx vpsrld $30,%ymm3,%ymm8 vpslld $2,%ymm3,%ymm3 addl 0(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx vpor %ymm8,%ymm3,%ymm3 addl %r12d,%ebp xorl %edi,%ebx addl 4(%r13),%eax andnl %edx,%ebp,%edi vpaddd %ymm11,%ymm3,%ymm9 addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp vmovdqu %ymm9,352(%rsp) addl %r12d,%eax xorl %edi,%ebp addl 8(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl 12(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vpalignr $8,%ymm2,%ymm3,%ymm8 vpxor %ymm0,%ymm4,%ymm4 addl 32(%r13),%ecx leal (%rcx,%rsi,1),%ecx vpxor %ymm5,%ymm4,%ymm4 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpxor %ymm8,%ymm4,%ymm4 addl %r12d,%ecx xorl %ebp,%edx addl 36(%r13),%ebx vpsrld $30,%ymm4,%ymm8 vpslld $2,%ymm4,%ymm4 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx vpor %ymm8,%ymm4,%ymm4 addl 40(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx vpaddd %ymm11,%ymm4,%ymm9 xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 44(%r13),%eax vmovdqu %ymm9,384(%rsp) leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpalignr $8,%ymm3,%ymm4,%ymm8 vpxor %ymm1,%ymm5,%ymm5 addl 68(%r13),%edx leal (%rdx,%rax,1),%edx vpxor %ymm6,%ymm5,%ymm5 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi vpxor %ymm8,%ymm5,%ymm5 addl %r12d,%edx xorl %ebx,%esi addl 72(%r13),%ecx vpsrld $30,%ymm5,%ymm8 vpslld $2,%ymm5,%ymm5 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx vpor %ymm8,%ymm5,%ymm5 addl 76(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpaddd %ymm11,%ymm5,%ymm9 xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 96(%r13),%ebp vmovdqu %ymm9,416(%rsp) leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 100(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpalignr $8,%ymm4,%ymm5,%ymm8 vpxor %ymm2,%ymm6,%ymm6 addl 104(%r13),%esi leal (%rsi,%rbp,1),%esi vpxor 
%ymm7,%ymm6,%ymm6 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax vpxor %ymm8,%ymm6,%ymm6 addl %r12d,%esi xorl %ecx,%eax addl 108(%r13),%edx leaq 256(%r13),%r13 vpsrld $30,%ymm6,%ymm8 vpslld $2,%ymm6,%ymm6 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vpor %ymm8,%ymm6,%ymm6 addl -128(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpaddd %ymm11,%ymm6,%ymm9 xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -124(%r13),%ebx vmovdqu %ymm9,448(%rsp) leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -120(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpalignr $8,%ymm5,%ymm6,%ymm8 vpxor %ymm3,%ymm7,%ymm7 addl -116(%r13),%eax leal (%rax,%rbx,1),%eax vpxor %ymm0,%ymm7,%ymm7 vmovdqu 32(%r14),%ymm11 rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp vpxor %ymm8,%ymm7,%ymm7 addl %r12d,%eax xorl %edx,%ebp addl -96(%r13),%esi vpsrld $30,%ymm7,%ymm8 vpslld $2,%ymm7,%ymm7 leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpor %ymm8,%ymm7,%ymm7 addl -92(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpaddd %ymm11,%ymm7,%ymm9 xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -88(%r13),%ecx vmovdqu %ymm9,480(%rsp) leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -84(%r13),%ebx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx jmp L$align32_2 .p2align 5 L$align32_2: vpalignr $8,%ymm6,%ymm7,%ymm8 vpxor %ymm4,%ymm0,%ymm0 addl -64(%r13),%ebp xorl %esi,%ecx vpxor %ymm1,%ymm0,%ymm0 movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp vpxor %ymm8,%ymm0,%ymm0 rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx vpsrld $30,%ymm0,%ymm8 vpslld $2,%ymm0,%ymm0 addl %r12d,%ebp andl %edi,%ebx addl -60(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi vpor %ymm8,%ymm0,%ymm0 leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp vpaddd %ymm11,%ymm0,%ymm9 addl %r12d,%eax andl %edi,%ebp addl -56(%r13),%esi xorl %ecx,%ebp vmovdqu %ymm9,512(%rsp) movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl -52(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi addl -32(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx vpalignr $8,%ymm7,%ymm0,%ymm8 vpxor %ymm5,%ymm1,%ymm1 addl -28(%r13),%ebx xorl %eax,%edx vpxor %ymm2,%ymm1,%ymm1 movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx vpxor %ymm8,%ymm1,%ymm1 rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vpsrld $30,%ymm1,%ymm8 vpslld $2,%ymm1,%ymm1 addl %r12d,%ebx andl %edi,%ecx addl -24(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi vpor %ymm8,%ymm1,%ymm1 leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx vpaddd %ymm11,%ymm1,%ymm9 addl %r12d,%ebp andl %edi,%ebx addl -20(%r13),%eax xorl %edx,%ebx vmovdqu %ymm9,544(%rsp) movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl 
%ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 0(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl 4(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi vpalignr $8,%ymm0,%ymm1,%ymm8 vpxor %ymm6,%ymm2,%ymm2 addl 8(%r13),%ecx xorl %ebp,%esi vpxor %ymm3,%ymm2,%ymm2 movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx vpxor %ymm8,%ymm2,%ymm2 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpsrld $30,%ymm2,%ymm8 vpslld $2,%ymm2,%ymm2 addl %r12d,%ecx andl %edi,%edx addl 12(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi vpor %ymm8,%ymm2,%ymm2 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vpaddd %ymm11,%ymm2,%ymm9 addl %r12d,%ebx andl %edi,%ecx addl 32(%r13),%ebp xorl %esi,%ecx vmovdqu %ymm9,576(%rsp) movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 36(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 40(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax vpalignr $8,%ymm1,%ymm2,%ymm8 vpxor %ymm7,%ymm3,%ymm3 addl 44(%r13),%edx xorl %ebx,%eax vpxor %ymm4,%ymm3,%ymm3 movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx vpxor %ymm8,%ymm3,%ymm3 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi vpsrld $30,%ymm3,%ymm8 vpslld $2,%ymm3,%ymm3 addl %r12d,%edx andl %edi,%esi addl 64(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi vpor %ymm8,%ymm3,%ymm3 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpaddd %ymm11,%ymm3,%ymm9 addl %r12d,%ecx andl %edi,%edx addl 68(%r13),%ebx xorl %eax,%edx vmovdqu %ymm9,608(%rsp) movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx addl 72(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 76(%r13),%eax xorl %edx,%ebx leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl 100(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 104(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 108(%r13),%ebx leaq 256(%r13),%r13 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -128(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -124(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -120(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -116(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax 
xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -96(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -92(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -88(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -84(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -60(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -56(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -52(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -32(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -28(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -24(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -20(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d addl %r12d,%edx leaq 128(%r9),%r13 leaq 128(%r9),%rdi cmpq %r10,%r13 cmovaeq %r9,%r13 addl 0(%r8),%edx addl 4(%r8),%esi addl 8(%r8),%ebp movl %edx,0(%r8) addl 12(%r8),%ebx movl %esi,4(%r8) movl %edx,%eax addl 16(%r8),%ecx movl %ebp,%r12d movl %ebp,8(%r8) movl %ebx,%edx movl %ebx,12(%r8) movl %esi,%ebp movl %ecx,16(%r8) movl %ecx,%esi movl %r12d,%ecx cmpq %r10,%r9 je L$done_avx2 vmovdqu 64(%r14),%ymm6 cmpq %r10,%rdi ja L$ast_avx2 vmovdqu -64(%rdi),%xmm0 vmovdqu -48(%rdi),%xmm1 vmovdqu -32(%rdi),%xmm2 vmovdqu -16(%rdi),%xmm3 vinserti128 $1,0(%r13),%ymm0,%ymm0 vinserti128 $1,16(%r13),%ymm1,%ymm1 vinserti128 $1,32(%r13),%ymm2,%ymm2 vinserti128 $1,48(%r13),%ymm3,%ymm3 jmp L$ast_avx2 .p2align 5 L$ast_avx2: leaq 128+16(%rsp),%r13 rorxl $2,%ebp,%ebx andnl %edx,%ebp,%edi andl %ecx,%ebp xorl %edi,%ebp subq $-128,%r9 addl -128(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -124(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -120(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -116(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -96(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl -92(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl -88(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -84(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -64(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx 
rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -60(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -56(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl -52(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl -32(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -28(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -24(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -20(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl 0(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl 4(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl 8(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl 12(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 32(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 36(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 40(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 44(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vmovdqu -64(%r14),%ymm11 vpshufb %ymm6,%ymm0,%ymm0 addl 68(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 72(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 76(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 96(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 100(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpshufb %ymm6,%ymm1,%ymm1 vpaddd %ymm11,%ymm0,%ymm8 addl 104(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl 108(%r13),%edx leaq 256(%r13),%r13 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -128(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -124(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl 
%eax,%ecx addl -120(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vmovdqu %ymm8,0(%rsp) vpshufb %ymm6,%ymm2,%ymm2 vpaddd %ymm11,%ymm1,%ymm9 addl -116(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -92(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -88(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -84(%r13),%ebx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx vmovdqu %ymm9,32(%rsp) vpshufb %ymm6,%ymm3,%ymm3 vpaddd %ymm11,%ymm2,%ymm6 addl -64(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl -60(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl -56(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl -52(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi addl -32(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx jmp L$align32_3 .p2align 5 L$align32_3: vmovdqu %ymm6,64(%rsp) vpaddd %ymm11,%ymm3,%ymm7 addl -28(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx addl -24(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl -20(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 0(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl 4(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi vmovdqu %ymm7,96(%rsp) addl 8(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx addl 12(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx addl 32(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 36(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 40(%r13),%esi xorl 
%ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax vpalignr $8,%ymm0,%ymm1,%ymm4 addl 44(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi vpsrldq $4,%ymm3,%ymm8 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpxor %ymm0,%ymm4,%ymm4 vpxor %ymm2,%ymm8,%ymm8 xorl %ebp,%esi addl %r12d,%edx vpxor %ymm8,%ymm4,%ymm4 andl %edi,%esi addl 64(%r13),%ecx xorl %ebp,%esi movl %eax,%edi vpsrld $31,%ymm4,%ymm8 xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d vpslldq $12,%ymm4,%ymm10 vpaddd %ymm4,%ymm4,%ymm4 rorxl $2,%edx,%esi xorl %eax,%edx vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm4,%ymm4 addl %r12d,%ecx andl %edi,%edx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm4,%ymm4 addl 68(%r13),%ebx xorl %eax,%edx vpxor %ymm10,%ymm4,%ymm4 movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx vpaddd %ymm11,%ymm4,%ymm9 rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vmovdqu %ymm9,128(%rsp) addl %r12d,%ebx andl %edi,%ecx addl 72(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 76(%r13),%eax xorl %edx,%ebx leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpalignr $8,%ymm1,%ymm2,%ymm5 addl 96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpsrldq $4,%ymm4,%ymm8 xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpxor %ymm1,%ymm5,%ymm5 vpxor %ymm3,%ymm8,%ymm8 addl 100(%r13),%edx leal (%rdx,%rax,1),%edx vpxor %ymm8,%ymm5,%ymm5 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx vpsrld $31,%ymm5,%ymm8 vmovdqu -32(%r14),%ymm11 xorl %ebx,%esi addl 104(%r13),%ecx leal (%rcx,%rsi,1),%ecx vpslldq $12,%ymm5,%ymm10 vpaddd %ymm5,%ymm5,%ymm5 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm5,%ymm5 xorl %eax,%edx addl %r12d,%ecx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm5,%ymm5 xorl %ebp,%edx addl 108(%r13),%ebx leaq 256(%r13),%r13 vpxor %ymm10,%ymm5,%ymm5 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpaddd %ymm11,%ymm5,%ymm9 xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx vmovdqu %ymm9,160(%rsp) addl -128(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpalignr $8,%ymm2,%ymm3,%ymm6 addl -124(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx vpsrldq $4,%ymm5,%ymm8 xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpxor %ymm2,%ymm6,%ymm6 vpxor %ymm4,%ymm8,%ymm8 addl -120(%r13),%esi leal (%rsi,%rbp,1),%esi vpxor %ymm8,%ymm6,%ymm6 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi vpsrld $31,%ymm6,%ymm8 xorl %ecx,%eax addl -116(%r13),%edx leal (%rdx,%rax,1),%edx vpslldq $12,%ymm6,%ymm10 vpaddd %ymm6,%ymm6,%ymm6 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm6,%ymm6 xorl %ebp,%esi addl %r12d,%edx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm6,%ymm6 xorl %ebx,%esi addl -96(%r13),%ecx vpxor %ymm10,%ymm6,%ymm6 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpaddd %ymm11,%ymm6,%ymm9 xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx vmovdqu %ymm9,192(%rsp) addl -92(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx vpalignr $8,%ymm3,%ymm4,%ymm7 addl -88(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl 
$2,%ebx,%ecx vpsrldq $4,%ymm6,%ymm8 xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpxor %ymm3,%ymm7,%ymm7 vpxor %ymm5,%ymm8,%ymm8 addl -84(%r13),%eax leal (%rax,%rbx,1),%eax vpxor %ymm8,%ymm7,%ymm7 rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax vpsrld $31,%ymm7,%ymm8 xorl %edx,%ebp addl -64(%r13),%esi leal (%rsi,%rbp,1),%esi vpslldq $12,%ymm7,%ymm10 vpaddd %ymm7,%ymm7,%ymm7 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm7,%ymm7 xorl %ebx,%eax addl %r12d,%esi vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm7,%ymm7 xorl %ecx,%eax addl -60(%r13),%edx vpxor %ymm10,%ymm7,%ymm7 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpaddd %ymm11,%ymm7,%ymm9 xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vmovdqu %ymm9,224(%rsp) addl -56(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -52(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -32(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -28(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -24(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -20(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d addl %r12d,%edx leaq 128(%rsp),%r13 addl 0(%r8),%edx addl 4(%r8),%esi addl 8(%r8),%ebp movl %edx,0(%r8) addl 12(%r8),%ebx movl %esi,4(%r8) movl %edx,%eax addl 16(%r8),%ecx movl %ebp,%r12d movl %ebp,8(%r8) movl %ebx,%edx movl %ebx,12(%r8) movl %esi,%ebp movl %ecx,16(%r8) movl %ecx,%esi movl %r12d,%ecx cmpq %r10,%r9 jbe L$oop_avx2 L$done_avx2: vzeroupper movq -40(%r11),%r14 movq -32(%r11),%r13 movq -24(%r11),%r12 movq -16(%r11),%rbp movq -8(%r11),%rbx leaq (%r11),%rsp L$epilogue_avx2: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 K_XX_XX: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .p2align 6 .text #endif
marvin-hansen/iggy-streaming-system
69,659
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/p256-x86_64-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .section __DATA,__const .p2align 6 L$poly: .quad 0xffffffffffffffff, 0x00000000ffffffff, 0x0000000000000000, 0xffffffff00000001 L$One: .long 1,1,1,1,1,1,1,1 L$Two: .long 2,2,2,2,2,2,2,2 L$Three: .long 3,3,3,3,3,3,3,3 L$ONE_mont: .quad 0x0000000000000001, 0xffffffff00000000, 0xffffffffffffffff, 0x00000000fffffffe L$ord: .quad 0xf3b9cac2fc632551, 0xbce6faada7179e84, 0xffffffffffffffff, 0xffffffff00000000 L$ordK: .quad 0xccd1c8aaee00bc4f .text .globl _ecp_nistz256_neg .private_extern _ecp_nistz256_neg .p2align 5 _ecp_nistz256_neg: _CET_ENDBR pushq %r12 pushq %r13 L$neg_body: xorq %r8,%r8 xorq %r9,%r9 xorq %r10,%r10 xorq %r11,%r11 xorq %r13,%r13 subq 0(%rsi),%r8 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r8,%rax sbbq 24(%rsi),%r11 leaq L$poly(%rip),%rsi movq %r9,%rdx sbbq $0,%r13 addq 0(%rsi),%r8 movq %r10,%rcx adcq 8(%rsi),%r9 adcq 16(%rsi),%r10 movq %r11,%r12 adcq 24(%rsi),%r11 testq %r13,%r13 cmovzq %rax,%r8 cmovzq %rdx,%r9 movq %r8,0(%rdi) cmovzq %rcx,%r10 movq %r9,8(%rdi) cmovzq %r12,%r11 movq %r10,16(%rdi) movq %r11,24(%rdi) movq 0(%rsp),%r13 movq 8(%rsp),%r12 leaq 16(%rsp),%rsp L$neg_epilogue: .byte 0xf3,0xc3 .globl _ecp_nistz256_ord_mul_mont .private_extern _ecp_nistz256_ord_mul_mont .p2align 5 _ecp_nistz256_ord_mul_mont: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je L$ecp_nistz256_ord_mul_montx #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$ord_mul_body: movq 0(%rdx),%rax movq %rdx,%rbx leaq L$ord(%rip),%r14 movq L$ordK(%rip),%r15 movq %rax,%rcx mulq 0(%rsi) movq %rax,%r8 movq %rcx,%rax movq %rdx,%r9 mulq 8(%rsi) addq %rax,%r9 movq %rcx,%rax adcq $0,%rdx movq %rdx,%r10 mulq 16(%rsi) addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %r8,%r13 imulq %r15,%r8 movq %rdx,%r11 mulq 24(%rsi) addq %rax,%r11 movq %r8,%rax adcq $0,%rdx movq %rdx,%r12 mulq 0(%r14) movq %r8,%rbp addq %rax,%r13 movq %r8,%rax adcq $0,%rdx movq %rdx,%rcx subq %r8,%r10 sbbq $0,%r8 mulq 8(%r14) addq %rcx,%r9 adcq $0,%rdx addq %rax,%r9 movq %rbp,%rax adcq %rdx,%r10 movq %rbp,%rdx adcq $0,%r8 shlq $32,%rax shrq $32,%rdx subq %rax,%r11 movq 8(%rbx),%rax sbbq %rdx,%rbp addq %r8,%r11 adcq %rbp,%r12 adcq $0,%r13 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r9 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r10 adcq $0,%rdx addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r11 adcq $0,%rdx addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %r9,%rcx imulq %r15,%r9 movq %rdx,%rbp mulq 24(%rsi) addq %rbp,%r12 adcq $0,%rdx xorq %r8,%r8 addq %rax,%r12 movq %r9,%rax adcq %rdx,%r13 adcq $0,%r8 mulq 0(%r14) movq %r9,%rbp addq %rax,%rcx movq %r9,%rax adcq %rdx,%rcx subq %r9,%r11 sbbq $0,%r9 mulq 8(%r14) addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %rbp,%rax adcq %rdx,%r11 movq %rbp,%rdx adcq $0,%r9 shlq $32,%rax shrq $32,%rdx subq %rax,%r12 movq 16(%rbx),%rax sbbq %rdx,%rbp addq %r9,%r12 adcq %rbp,%r13 adcq $0,%r8 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r11 adcq $0,%rdx addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r12 adcq $0,%rdx addq %rax,%r12 movq %rcx,%rax adcq $0,%rdx movq %r10,%rcx imulq %r15,%r10 movq %rdx,%rbp mulq 24(%rsi) 
addq %rbp,%r13 adcq $0,%rdx xorq %r9,%r9 addq %rax,%r13 movq %r10,%rax adcq %rdx,%r8 adcq $0,%r9 mulq 0(%r14) movq %r10,%rbp addq %rax,%rcx movq %r10,%rax adcq %rdx,%rcx subq %r10,%r12 sbbq $0,%r10 mulq 8(%r14) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq %rdx,%r12 movq %rbp,%rdx adcq $0,%r10 shlq $32,%rax shrq $32,%rdx subq %rax,%r13 movq 24(%rbx),%rax sbbq %rdx,%rbp addq %r10,%r13 adcq %rbp,%r8 adcq $0,%r9 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r12 adcq $0,%rdx addq %rax,%r12 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r13 adcq $0,%rdx addq %rax,%r13 movq %rcx,%rax adcq $0,%rdx movq %r11,%rcx imulq %r15,%r11 movq %rdx,%rbp mulq 24(%rsi) addq %rbp,%r8 adcq $0,%rdx xorq %r10,%r10 addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 adcq $0,%r10 mulq 0(%r14) movq %r11,%rbp addq %rax,%rcx movq %r11,%rax adcq %rdx,%rcx subq %r11,%r13 sbbq $0,%r11 mulq 8(%r14) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq %rdx,%r13 movq %rbp,%rdx adcq $0,%r11 shlq $32,%rax shrq $32,%rdx subq %rax,%r8 sbbq %rdx,%rbp addq %r11,%r8 adcq %rbp,%r9 adcq $0,%r10 movq %r12,%rsi subq 0(%r14),%r12 movq %r13,%r11 sbbq 8(%r14),%r13 movq %r8,%rcx sbbq 16(%r14),%r8 movq %r9,%rbp sbbq 24(%r14),%r9 sbbq $0,%r10 cmovcq %rsi,%r12 cmovcq %r11,%r13 cmovcq %rcx,%r8 cmovcq %rbp,%r9 movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$ord_mul_epilogue: .byte 0xf3,0xc3 .globl _ecp_nistz256_ord_sqr_mont .private_extern _ecp_nistz256_ord_sqr_mont .p2align 5 _ecp_nistz256_ord_sqr_mont: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je L$ecp_nistz256_ord_sqr_montx #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$ord_sqr_body: movq 0(%rsi),%r8 movq 8(%rsi),%rax movq 16(%rsi),%r14 movq 24(%rsi),%r15 leaq L$ord(%rip),%rsi movq %rdx,%rbx jmp L$oop_ord_sqr .p2align 5 L$oop_ord_sqr: movq %rax,%rbp mulq %r8 movq %rax,%r9 .byte 102,72,15,110,205 movq %r14,%rax movq %rdx,%r10 mulq %r8 addq %rax,%r10 movq %r15,%rax .byte 102,73,15,110,214 adcq $0,%rdx movq %rdx,%r11 mulq %r8 addq %rax,%r11 movq %r15,%rax .byte 102,73,15,110,223 adcq $0,%rdx movq %rdx,%r12 mulq %r14 movq %rax,%r13 movq %r14,%rax movq %rdx,%r14 mulq %rbp addq %rax,%r11 movq %r15,%rax adcq $0,%rdx movq %rdx,%r15 mulq %rbp addq %rax,%r12 adcq $0,%rdx addq %r15,%r12 adcq %rdx,%r13 adcq $0,%r14 xorq %r15,%r15 movq %r8,%rax addq %r9,%r9 adcq %r10,%r10 adcq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 adcq %r14,%r14 adcq $0,%r15 mulq %rax movq %rax,%r8 .byte 102,72,15,126,200 movq %rdx,%rbp mulq %rax addq %rbp,%r9 adcq %rax,%r10 .byte 102,72,15,126,208 adcq $0,%rdx movq %rdx,%rbp mulq %rax addq %rbp,%r11 adcq %rax,%r12 .byte 102,72,15,126,216 adcq $0,%rdx movq %rdx,%rbp movq %r8,%rcx imulq 32(%rsi),%r8 mulq %rax addq %rbp,%r13 adcq %rax,%r14 movq 0(%rsi),%rax adcq %rdx,%r15 mulq %r8 movq %r8,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r8,%r10 sbbq $0,%rbp mulq %r8 addq %rcx,%r9 adcq $0,%rdx addq %rax,%r9 movq %r8,%rax adcq %rdx,%r10 movq %r8,%rdx adcq $0,%rbp movq %r9,%rcx imulq 32(%rsi),%r9 shlq $32,%rax shrq $32,%rdx subq %rax,%r11 movq 0(%rsi),%rax sbbq %rdx,%r8 addq %rbp,%r11 adcq $0,%r8 mulq %r9 movq %r9,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r9,%r11 sbbq $0,%rbp 
mulq %r9 addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %r9,%rax adcq %rdx,%r11 movq %r9,%rdx adcq $0,%rbp movq %r10,%rcx imulq 32(%rsi),%r10 shlq $32,%rax shrq $32,%rdx subq %rax,%r8 movq 0(%rsi),%rax sbbq %rdx,%r9 addq %rbp,%r8 adcq $0,%r9 mulq %r10 movq %r10,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r10,%r8 sbbq $0,%rbp mulq %r10 addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %r10,%rax adcq %rdx,%r8 movq %r10,%rdx adcq $0,%rbp movq %r11,%rcx imulq 32(%rsi),%r11 shlq $32,%rax shrq $32,%rdx subq %rax,%r9 movq 0(%rsi),%rax sbbq %rdx,%r10 addq %rbp,%r9 adcq $0,%r10 mulq %r11 movq %r11,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r11,%r9 sbbq $0,%rbp mulq %r11 addq %rcx,%r8 adcq $0,%rdx addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 movq %r11,%rdx adcq $0,%rbp shlq $32,%rax shrq $32,%rdx subq %rax,%r10 sbbq %rdx,%r11 addq %rbp,%r10 adcq $0,%r11 xorq %rdx,%rdx addq %r12,%r8 adcq %r13,%r9 movq %r8,%r12 adcq %r14,%r10 adcq %r15,%r11 movq %r9,%rax adcq $0,%rdx subq 0(%rsi),%r8 movq %r10,%r14 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r11,%r15 sbbq 24(%rsi),%r11 sbbq $0,%rdx cmovcq %r12,%r8 cmovncq %r9,%rax cmovncq %r10,%r14 cmovncq %r11,%r15 decq %rbx jnz L$oop_ord_sqr movq %r8,0(%rdi) movq %rax,8(%rdi) pxor %xmm1,%xmm1 movq %r14,16(%rdi) pxor %xmm2,%xmm2 movq %r15,24(%rdi) pxor %xmm3,%xmm3 movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$ord_sqr_epilogue: .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .p2align 5 ecp_nistz256_ord_mul_montx: L$ecp_nistz256_ord_mul_montx: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$ord_mulx_body: movq %rdx,%rbx movq 0(%rdx),%rdx movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 leaq -128(%rsi),%rsi leaq L$ord-128(%rip),%r14 movq L$ordK(%rip),%r15 mulxq %r9,%r8,%r9 mulxq %r10,%rcx,%r10 mulxq %r11,%rbp,%r11 addq %rcx,%r9 mulxq %r12,%rcx,%r12 movq %r8,%rdx mulxq %r15,%rdx,%rax adcq %rbp,%r10 adcq %rcx,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 24+128(%r14),%rcx,%rbp movq 8(%rbx),%rdx adcxq %rcx,%r11 adoxq %rbp,%r12 adcxq %r8,%r12 adoxq %r8,%r13 adcq $0,%r13 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%rsi),%rcx,%rbp movq %r9,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r12 adoxq %rbp,%r13 adcxq %r8,%r13 adoxq %r8,%r8 adcq $0,%r8 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%r14),%rcx,%rbp movq 16(%rbx),%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcxq %r9,%r13 adoxq %r9,%r8 adcq $0,%r8 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%rsi),%rcx,%rbp movq %r10,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r13 adoxq %rbp,%r8 adcxq %r9,%r8 adoxq %r9,%r9 adcq $0,%r9 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%r14),%rcx,%rbp movq 24(%rbx),%rdx adcxq %rcx,%r13 adoxq %rbp,%r8 
adcxq %r10,%r8 adoxq %r10,%r9 adcq $0,%r9 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%rsi),%rcx,%rbp movq %r11,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r8 adoxq %rbp,%r9 adcxq %r10,%r9 adoxq %r10,%r10 adcq $0,%r10 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%r14),%rcx,%rbp leaq 128(%r14),%r14 movq %r12,%rbx adcxq %rcx,%r8 adoxq %rbp,%r9 movq %r13,%rdx adcxq %r11,%r9 adoxq %r11,%r10 adcq $0,%r10 movq %r8,%rcx subq 0(%r14),%r12 sbbq 8(%r14),%r13 sbbq 16(%r14),%r8 movq %r9,%rbp sbbq 24(%r14),%r9 sbbq $0,%r10 cmovcq %rbx,%r12 cmovcq %rdx,%r13 cmovcq %rcx,%r8 cmovcq %rbp,%r9 movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$ord_mulx_epilogue: .byte 0xf3,0xc3 .p2align 5 ecp_nistz256_ord_sqr_montx: L$ecp_nistz256_ord_sqr_montx: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$ord_sqrx_body: movq %rdx,%rbx movq 0(%rsi),%rdx movq 8(%rsi),%r14 movq 16(%rsi),%r15 movq 24(%rsi),%r8 leaq L$ord(%rip),%rsi jmp L$oop_ord_sqrx .p2align 5 L$oop_ord_sqrx: mulxq %r14,%r9,%r10 mulxq %r15,%rcx,%r11 movq %rdx,%rax .byte 102,73,15,110,206 mulxq %r8,%rbp,%r12 movq %r14,%rdx addq %rcx,%r10 .byte 102,73,15,110,215 adcq %rbp,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq %r15,%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq %r8,%rcx,%rbp movq %r15,%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcq $0,%r13 mulxq %r8,%rcx,%r14 movq %rax,%rdx .byte 102,73,15,110,216 xorq %r15,%r15 adcxq %r9,%r9 adoxq %rcx,%r13 adcxq %r10,%r10 adoxq %r15,%r14 mulxq %rdx,%r8,%rbp .byte 102,72,15,126,202 adcxq %r11,%r11 adoxq %rbp,%r9 adcxq %r12,%r12 mulxq %rdx,%rcx,%rax .byte 102,72,15,126,210 adcxq %r13,%r13 adoxq %rcx,%r10 adcxq %r14,%r14 mulxq %rdx,%rcx,%rbp .byte 0x67 .byte 102,72,15,126,218 adoxq %rax,%r11 adcxq %r15,%r15 adoxq %rcx,%r12 adoxq %rbp,%r13 mulxq %rdx,%rcx,%rax adoxq %rcx,%r14 adoxq %rax,%r15 movq %r8,%rdx mulxq 32(%rsi),%rdx,%rcx xorq %rax,%rax mulxq 0(%rsi),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 8(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 16(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 24(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r8 adcxq %rax,%r8 movq %r9,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adoxq %rcx,%r9 adcxq %rbp,%r10 mulxq 8(%rsi),%rcx,%rbp adoxq %rcx,%r10 adcxq %rbp,%r11 mulxq 16(%rsi),%rcx,%rbp adoxq %rcx,%r11 adcxq %rbp,%r8 mulxq 24(%rsi),%rcx,%rbp adoxq %rcx,%r8 adcxq %rbp,%r9 adoxq %rax,%r9 movq %r10,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r8 mulxq 16(%rsi),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 24(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 adcxq %rax,%r10 movq %r11,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adoxq %rcx,%r11 adcxq %rbp,%r8 mulxq 8(%rsi),%rcx,%rbp adoxq %rcx,%r8 adcxq %rbp,%r9 mulxq 16(%rsi),%rcx,%rbp adoxq %rcx,%r9 adcxq %rbp,%r10 mulxq 24(%rsi),%rcx,%rbp adoxq %rcx,%r10 adcxq %rbp,%r11 adoxq %rax,%r11 addq %r8,%r12 adcq %r13,%r9 movq %r12,%rdx adcq %r14,%r10 adcq %r15,%r11 movq %r9,%r14 adcq $0,%rax subq 0(%rsi),%r12 movq %r10,%r15 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r11,%r8 sbbq 
24(%rsi),%r11 sbbq $0,%rax cmovncq %r12,%rdx cmovncq %r9,%r14 cmovncq %r10,%r15 cmovncq %r11,%r8 decq %rbx jnz L$oop_ord_sqrx movq %rdx,0(%rdi) movq %r14,8(%rdi) pxor %xmm1,%xmm1 movq %r15,16(%rdi) pxor %xmm2,%xmm2 movq %r8,24(%rdi) pxor %xmm3,%xmm3 movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$ord_sqrx_epilogue: .byte 0xf3,0xc3 #endif .globl _ecp_nistz256_mul_mont .private_extern _ecp_nistz256_mul_mont .p2align 5 _ecp_nistz256_mul_mont: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx #endif L$mul_mont: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$mul_body: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX cmpl $0x80100,%ecx je L$mul_montx #endif movq %rdx,%rbx movq 0(%rdx),%rax movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 call __ecp_nistz256_mul_montq #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX jmp L$mul_mont_done .p2align 5 L$mul_montx: movq %rdx,%rbx movq 0(%rdx),%rdx movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 leaq -128(%rsi),%rsi call __ecp_nistz256_mul_montx #endif L$mul_mont_done: movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$mul_epilogue: .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_mul_montq: movq %rax,%rbp mulq %r9 movq L$poly+8(%rip),%r14 movq %rax,%r8 movq %rbp,%rax movq %rdx,%r9 mulq %r10 movq L$poly+24(%rip),%r15 addq %rax,%r9 movq %rbp,%rax adcq $0,%rdx movq %rdx,%r10 mulq %r11 addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%r11 mulq %r12 addq %rax,%r11 movq %r8,%rax adcq $0,%rdx xorq %r13,%r13 movq %rdx,%r12 movq %r8,%rbp shlq $32,%r8 mulq %r15 shrq $32,%rbp addq %r8,%r9 adcq %rbp,%r10 adcq %rax,%r11 movq 8(%rbx),%rax adcq %rdx,%r12 adcq $0,%r13 xorq %r8,%r8 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r9 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %r9,%rax adcq %rdx,%r13 adcq $0,%r8 movq %r9,%rbp shlq $32,%r9 mulq %r15 shrq $32,%rbp addq %r9,%r10 adcq %rbp,%r11 adcq %rax,%r12 movq 16(%rbx),%rax adcq %rdx,%r13 adcq $0,%r8 xorq %r9,%r9 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r13 adcq $0,%rdx addq %rax,%r13 movq %r10,%rax adcq %rdx,%r8 adcq $0,%r9 movq %r10,%rbp shlq $32,%r10 mulq %r15 shrq $32,%rbp addq %r10,%r11 adcq %rbp,%r12 adcq %rax,%r13 movq 24(%rbx),%rax adcq %rdx,%r8 adcq $0,%r9 xorq %r10,%r10 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r13 adcq $0,%rdx addq %rax,%r13 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r8 adcq $0,%rdx addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 adcq $0,%r10 movq %r11,%rbp shlq $32,%r11 mulq %r15 shrq $32,%rbp addq %r11,%r12 adcq %rbp,%r13 movq %r12,%rcx adcq %rax,%r8 adcq %rdx,%r9 movq %r13,%rbp adcq $0,%r10 subq $-1,%r12 
movq %r8,%rbx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%rdx sbbq %r15,%r9 sbbq $0,%r10 cmovcq %rcx,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rbx,%r8 movq %r13,8(%rdi) cmovcq %rdx,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .globl _ecp_nistz256_sqr_mont .private_extern _ecp_nistz256_sqr_mont .p2align 5 _ecp_nistz256_sqr_mont: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$sqr_body: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX cmpl $0x80100,%ecx je L$sqr_montx #endif movq 0(%rsi),%rax movq 8(%rsi),%r14 movq 16(%rsi),%r15 movq 24(%rsi),%r8 call __ecp_nistz256_sqr_montq #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX jmp L$sqr_mont_done .p2align 5 L$sqr_montx: movq 0(%rsi),%rdx movq 8(%rsi),%r14 movq 16(%rsi),%r15 movq 24(%rsi),%r8 leaq -128(%rsi),%rsi call __ecp_nistz256_sqr_montx #endif L$sqr_mont_done: movq 0(%rsp),%r15 movq 8(%rsp),%r14 movq 16(%rsp),%r13 movq 24(%rsp),%r12 movq 32(%rsp),%rbx movq 40(%rsp),%rbp leaq 48(%rsp),%rsp L$sqr_epilogue: .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_sqr_montq: movq %rax,%r13 mulq %r14 movq %rax,%r9 movq %r15,%rax movq %rdx,%r10 mulq %r13 addq %rax,%r10 movq %r8,%rax adcq $0,%rdx movq %rdx,%r11 mulq %r13 addq %rax,%r11 movq %r15,%rax adcq $0,%rdx movq %rdx,%r12 mulq %r14 addq %rax,%r11 movq %r8,%rax adcq $0,%rdx movq %rdx,%rbp mulq %r14 addq %rax,%r12 movq %r8,%rax adcq $0,%rdx addq %rbp,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %r15 xorq %r15,%r15 addq %rax,%r13 movq 0(%rsi),%rax movq %rdx,%r14 adcq $0,%r14 addq %r9,%r9 adcq %r10,%r10 adcq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 adcq %r14,%r14 adcq $0,%r15 mulq %rax movq %rax,%r8 movq 8(%rsi),%rax movq %rdx,%rcx mulq %rax addq %rcx,%r9 adcq %rax,%r10 movq 16(%rsi),%rax adcq $0,%rdx movq %rdx,%rcx mulq %rax addq %rcx,%r11 adcq %rax,%r12 movq 24(%rsi),%rax adcq $0,%rdx movq %rdx,%rcx mulq %rax addq %rcx,%r13 adcq %rax,%r14 movq %r8,%rax adcq %rdx,%r15 movq L$poly+8(%rip),%rsi movq L$poly+24(%rip),%rbp movq %r8,%rcx shlq $32,%r8 mulq %rbp shrq $32,%rcx addq %r8,%r9 adcq %rcx,%r10 adcq %rax,%r11 movq %r9,%rax adcq $0,%rdx movq %r9,%rcx shlq $32,%r9 movq %rdx,%r8 mulq %rbp shrq $32,%rcx addq %r9,%r10 adcq %rcx,%r11 adcq %rax,%r8 movq %r10,%rax adcq $0,%rdx movq %r10,%rcx shlq $32,%r10 movq %rdx,%r9 mulq %rbp shrq $32,%rcx addq %r10,%r11 adcq %rcx,%r8 adcq %rax,%r9 movq %r11,%rax adcq $0,%rdx movq %r11,%rcx shlq $32,%r11 movq %rdx,%r10 mulq %rbp shrq $32,%rcx addq %r11,%r8 adcq %rcx,%r9 adcq %rax,%r10 adcq $0,%rdx xorq %r11,%r11 addq %r8,%r12 adcq %r9,%r13 movq %r12,%r8 adcq %r10,%r14 adcq %rdx,%r15 movq %r13,%r9 adcq $0,%r11 subq $-1,%r12 movq %r14,%r10 sbbq %rsi,%r13 sbbq $0,%r14 movq %r15,%rcx sbbq %rbp,%r15 sbbq $0,%r11 cmovcq %r8,%r12 cmovcq %r9,%r13 movq %r12,0(%rdi) cmovcq %r10,%r14 movq %r13,8(%rdi) cmovcq %rcx,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .p2align 5 __ecp_nistz256_mul_montx: mulxq %r9,%r8,%r9 mulxq %r10,%rcx,%r10 movq $32,%r14 xorq %r13,%r13 mulxq %r11,%rbp,%r11 movq L$poly+24(%rip),%r15 adcq %rcx,%r9 mulxq %r12,%rcx,%r12 movq %r8,%rdx adcq %rbp,%r10 shlxq %r14,%r8,%rbp adcq %rcx,%r11 shrxq %r14,%r8,%rcx adcq $0,%r12 addq %rbp,%r9 adcq %rcx,%r10 mulxq %r15,%rcx,%rbp movq 8(%rbx),%rdx adcq %rcx,%r11 adcq %rbp,%r12 adcq $0,%r13 xorq %r8,%r8 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 
16+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%rsi),%rcx,%rbp movq %r9,%rdx adcxq %rcx,%r12 shlxq %r14,%r9,%rcx adoxq %rbp,%r13 shrxq %r14,%r9,%rbp adcxq %r8,%r13 adoxq %r8,%r8 adcq $0,%r8 addq %rcx,%r10 adcq %rbp,%r11 mulxq %r15,%rcx,%rbp movq 16(%rbx),%rdx adcq %rcx,%r12 adcq %rbp,%r13 adcq $0,%r8 xorq %r9,%r9 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%rsi),%rcx,%rbp movq %r10,%rdx adcxq %rcx,%r13 shlxq %r14,%r10,%rcx adoxq %rbp,%r8 shrxq %r14,%r10,%rbp adcxq %r9,%r8 adoxq %r9,%r9 adcq $0,%r9 addq %rcx,%r11 adcq %rbp,%r12 mulxq %r15,%rcx,%rbp movq 24(%rbx),%rdx adcq %rcx,%r13 adcq %rbp,%r8 adcq $0,%r9 xorq %r10,%r10 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%rsi),%rcx,%rbp movq %r11,%rdx adcxq %rcx,%r8 shlxq %r14,%r11,%rcx adoxq %rbp,%r9 shrxq %r14,%r11,%rbp adcxq %r10,%r9 adoxq %r10,%r10 adcq $0,%r10 addq %rcx,%r12 adcq %rbp,%r13 mulxq %r15,%rcx,%rbp movq %r12,%rbx movq L$poly+8(%rip),%r14 adcq %rcx,%r8 movq %r13,%rdx adcq %rbp,%r9 adcq $0,%r10 xorl %eax,%eax movq %r8,%rcx sbbq $-1,%r12 sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%rbp sbbq %r15,%r9 sbbq $0,%r10 cmovcq %rbx,%r12 cmovcq %rdx,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %rbp,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_sqr_montx: mulxq %r14,%r9,%r10 mulxq %r15,%rcx,%r11 xorl %eax,%eax adcq %rcx,%r10 mulxq %r8,%rbp,%r12 movq %r14,%rdx adcq %rbp,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq %r15,%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq %r8,%rcx,%rbp movq %r15,%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcq $0,%r13 mulxq %r8,%rcx,%r14 movq 0+128(%rsi),%rdx xorq %r15,%r15 adcxq %r9,%r9 adoxq %rcx,%r13 adcxq %r10,%r10 adoxq %r15,%r14 mulxq %rdx,%r8,%rbp movq 8+128(%rsi),%rdx adcxq %r11,%r11 adoxq %rbp,%r9 adcxq %r12,%r12 mulxq %rdx,%rcx,%rax movq 16+128(%rsi),%rdx adcxq %r13,%r13 adoxq %rcx,%r10 adcxq %r14,%r14 .byte 0x67 mulxq %rdx,%rcx,%rbp movq 24+128(%rsi),%rdx adoxq %rax,%r11 adcxq %r15,%r15 adoxq %rcx,%r12 movq $32,%rsi adoxq %rbp,%r13 .byte 0x67,0x67 mulxq %rdx,%rcx,%rax movq L$poly+24(%rip),%rdx adoxq %rcx,%r14 shlxq %rsi,%r8,%rcx adoxq %rax,%r15 shrxq %rsi,%r8,%rax movq %rdx,%rbp addq %rcx,%r9 adcq %rax,%r10 mulxq %r8,%rcx,%r8 adcq %rcx,%r11 shlxq %rsi,%r9,%rcx adcq $0,%r8 shrxq %rsi,%r9,%rax addq %rcx,%r10 adcq %rax,%r11 mulxq %r9,%rcx,%r9 adcq %rcx,%r8 shlxq %rsi,%r10,%rcx adcq $0,%r9 shrxq %rsi,%r10,%rax addq %rcx,%r11 adcq %rax,%r8 mulxq %r10,%rcx,%r10 adcq %rcx,%r9 shlxq %rsi,%r11,%rcx adcq $0,%r10 shrxq %rsi,%r11,%rax addq %rcx,%r8 adcq %rax,%r9 mulxq %r11,%rcx,%r11 adcq %rcx,%r10 adcq $0,%r11 xorq %rdx,%rdx addq %r8,%r12 movq L$poly+8(%rip),%rsi adcq %r9,%r13 movq %r12,%r8 adcq %r10,%r14 adcq %r11,%r15 movq %r13,%r9 adcq $0,%rdx subq $-1,%r12 movq %r14,%r10 sbbq %rsi,%r13 sbbq $0,%r14 movq %r15,%r11 sbbq %rbp,%r15 sbbq $0,%rdx cmovcq %r8,%r12 cmovcq %r9,%r13 movq %r12,0(%rdi) cmovcq %r10,%r14 movq %r13,8(%rdi) cmovcq %r11,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) .byte 0xf3,0xc3 #endif .globl _ecp_nistz256_select_w5 .private_extern _ecp_nistz256_select_w5 .p2align 5 _ecp_nistz256_select_w5: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rax movq 8(%rax),%rax testl $32,%eax jnz L$avx2_select_w5 #endif movdqa 
L$One(%rip),%xmm0 movd %edx,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movdqa %xmm0,%xmm8 pshufd $0,%xmm1,%xmm1 movq $16,%rax L$select_loop_sse_w5: movdqa %xmm8,%xmm15 paddd %xmm0,%xmm8 pcmpeqd %xmm1,%xmm15 movdqa 0(%rsi),%xmm9 movdqa 16(%rsi),%xmm10 movdqa 32(%rsi),%xmm11 movdqa 48(%rsi),%xmm12 movdqa 64(%rsi),%xmm13 movdqa 80(%rsi),%xmm14 leaq 96(%rsi),%rsi pand %xmm15,%xmm9 pand %xmm15,%xmm10 por %xmm9,%xmm2 pand %xmm15,%xmm11 por %xmm10,%xmm3 pand %xmm15,%xmm12 por %xmm11,%xmm4 pand %xmm15,%xmm13 por %xmm12,%xmm5 pand %xmm15,%xmm14 por %xmm13,%xmm6 por %xmm14,%xmm7 decq %rax jnz L$select_loop_sse_w5 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqu %xmm4,32(%rdi) movdqu %xmm5,48(%rdi) movdqu %xmm6,64(%rdi) movdqu %xmm7,80(%rdi) .byte 0xf3,0xc3 L$SEH_end_ecp_nistz256_select_w5: .globl _ecp_nistz256_select_w7 .private_extern _ecp_nistz256_select_w7 .p2align 5 _ecp_nistz256_select_w7: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rax movq 8(%rax),%rax testl $32,%eax jnz L$avx2_select_w7 #endif movdqa L$One(%rip),%xmm8 movd %edx,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa %xmm8,%xmm0 pshufd $0,%xmm1,%xmm1 movq $64,%rax L$select_loop_sse_w7: movdqa %xmm8,%xmm15 paddd %xmm0,%xmm8 movdqa 0(%rsi),%xmm9 movdqa 16(%rsi),%xmm10 pcmpeqd %xmm1,%xmm15 movdqa 32(%rsi),%xmm11 movdqa 48(%rsi),%xmm12 leaq 64(%rsi),%rsi pand %xmm15,%xmm9 pand %xmm15,%xmm10 por %xmm9,%xmm2 pand %xmm15,%xmm11 por %xmm10,%xmm3 pand %xmm15,%xmm12 por %xmm11,%xmm4 prefetcht0 255(%rsi) por %xmm12,%xmm5 decq %rax jnz L$select_loop_sse_w7 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqu %xmm4,32(%rdi) movdqu %xmm5,48(%rdi) .byte 0xf3,0xc3 L$SEH_end_ecp_nistz256_select_w7: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .p2align 5 ecp_nistz256_avx2_select_w5: L$avx2_select_w5: vzeroupper vmovdqa L$Two(%rip),%ymm0 vpxor %ymm2,%ymm2,%ymm2 vpxor %ymm3,%ymm3,%ymm3 vpxor %ymm4,%ymm4,%ymm4 vmovdqa L$One(%rip),%ymm5 vmovdqa L$Two(%rip),%ymm10 vmovd %edx,%xmm1 vpermd %ymm1,%ymm2,%ymm1 movq $8,%rax L$select_loop_avx2_w5: vmovdqa 0(%rsi),%ymm6 vmovdqa 32(%rsi),%ymm7 vmovdqa 64(%rsi),%ymm8 vmovdqa 96(%rsi),%ymm11 vmovdqa 128(%rsi),%ymm12 vmovdqa 160(%rsi),%ymm13 vpcmpeqd %ymm1,%ymm5,%ymm9 vpcmpeqd %ymm1,%ymm10,%ymm14 vpaddd %ymm0,%ymm5,%ymm5 vpaddd %ymm0,%ymm10,%ymm10 leaq 192(%rsi),%rsi vpand %ymm9,%ymm6,%ymm6 vpand %ymm9,%ymm7,%ymm7 vpand %ymm9,%ymm8,%ymm8 vpand %ymm14,%ymm11,%ymm11 vpand %ymm14,%ymm12,%ymm12 vpand %ymm14,%ymm13,%ymm13 vpxor %ymm6,%ymm2,%ymm2 vpxor %ymm7,%ymm3,%ymm3 vpxor %ymm8,%ymm4,%ymm4 vpxor %ymm11,%ymm2,%ymm2 vpxor %ymm12,%ymm3,%ymm3 vpxor %ymm13,%ymm4,%ymm4 decq %rax jnz L$select_loop_avx2_w5 vmovdqu %ymm2,0(%rdi) vmovdqu %ymm3,32(%rdi) vmovdqu %ymm4,64(%rdi) vzeroupper .byte 0xf3,0xc3 L$SEH_end_ecp_nistz256_avx2_select_w5: #endif #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .globl _ecp_nistz256_avx2_select_w7 .private_extern _ecp_nistz256_avx2_select_w7 .p2align 5 _ecp_nistz256_avx2_select_w7: L$avx2_select_w7: _CET_ENDBR vzeroupper vmovdqa L$Three(%rip),%ymm0 vpxor %ymm2,%ymm2,%ymm2 vpxor %ymm3,%ymm3,%ymm3 vmovdqa L$One(%rip),%ymm4 vmovdqa L$Two(%rip),%ymm8 vmovdqa L$Three(%rip),%ymm12 vmovd %edx,%xmm1 vpermd %ymm1,%ymm2,%ymm1 movq $21,%rax L$select_loop_avx2_w7: vmovdqa 0(%rsi),%ymm5 vmovdqa 32(%rsi),%ymm6 vmovdqa 64(%rsi),%ymm9 vmovdqa 96(%rsi),%ymm10 vmovdqa 128(%rsi),%ymm13 vmovdqa 160(%rsi),%ymm14 vpcmpeqd %ymm1,%ymm4,%ymm7 vpcmpeqd %ymm1,%ymm8,%ymm11 vpcmpeqd 
%ymm1,%ymm12,%ymm15 vpaddd %ymm0,%ymm4,%ymm4 vpaddd %ymm0,%ymm8,%ymm8 vpaddd %ymm0,%ymm12,%ymm12 leaq 192(%rsi),%rsi vpand %ymm7,%ymm5,%ymm5 vpand %ymm7,%ymm6,%ymm6 vpand %ymm11,%ymm9,%ymm9 vpand %ymm11,%ymm10,%ymm10 vpand %ymm15,%ymm13,%ymm13 vpand %ymm15,%ymm14,%ymm14 vpxor %ymm5,%ymm2,%ymm2 vpxor %ymm6,%ymm3,%ymm3 vpxor %ymm9,%ymm2,%ymm2 vpxor %ymm10,%ymm3,%ymm3 vpxor %ymm13,%ymm2,%ymm2 vpxor %ymm14,%ymm3,%ymm3 decq %rax jnz L$select_loop_avx2_w7 vmovdqa 0(%rsi),%ymm5 vmovdqa 32(%rsi),%ymm6 vpcmpeqd %ymm1,%ymm4,%ymm7 vpand %ymm7,%ymm5,%ymm5 vpand %ymm7,%ymm6,%ymm6 vpxor %ymm5,%ymm2,%ymm2 vpxor %ymm6,%ymm3,%ymm3 vmovdqu %ymm2,0(%rdi) vmovdqu %ymm3,32(%rdi) vzeroupper .byte 0xf3,0xc3 L$SEH_end_ecp_nistz256_avx2_select_w7: #endif .p2align 5 __ecp_nistz256_add_toq: xorq %r11,%r11 addq 0(%rbx),%r12 adcq 8(%rbx),%r13 movq %r12,%rax adcq 16(%rbx),%r8 adcq 24(%rbx),%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_sub_fromq: subq 0(%rbx),%r12 sbbq 8(%rbx),%r13 movq %r12,%rax sbbq 16(%rbx),%r8 sbbq 24(%rbx),%r9 movq %r13,%rbp sbbq %r11,%r11 addq $-1,%r12 movq %r8,%rcx adcq %r14,%r13 adcq $0,%r8 movq %r9,%r10 adcq %r15,%r9 testq %r11,%r11 cmovzq %rax,%r12 cmovzq %rbp,%r13 movq %r12,0(%rdi) cmovzq %rcx,%r8 movq %r13,8(%rdi) cmovzq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_subq: subq %r12,%rax sbbq %r13,%rbp movq %rax,%r12 sbbq %r8,%rcx sbbq %r9,%r10 movq %rbp,%r13 sbbq %r11,%r11 addq $-1,%rax movq %rcx,%r8 adcq %r14,%rbp adcq $0,%rcx movq %r10,%r9 adcq %r15,%r10 testq %r11,%r11 cmovnzq %rax,%r12 cmovnzq %rbp,%r13 cmovnzq %rcx,%r8 cmovnzq %r10,%r9 .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_mul_by_2q: xorq %r11,%r11 addq %r12,%r12 adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .globl _ecp_nistz256_point_double .private_extern _ecp_nistz256_point_double .p2align 5 _ecp_nistz256_point_double: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je L$point_doublex #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $160+8,%rsp L$point_doubleq_body: L$point_double_shortcutq: movdqu 0(%rsi),%xmm0 movq %rsi,%rbx movdqu 16(%rsi),%xmm1 movq 32+0(%rsi),%r12 movq 32+8(%rsi),%r13 movq 32+16(%rsi),%r8 movq 32+24(%rsi),%r9 movq L$poly+8(%rip),%r14 movq L$poly+24(%rip),%r15 movdqa %xmm0,96(%rsp) movdqa %xmm1,96+16(%rsp) leaq 32(%rdi),%r10 leaq 64(%rdi),%r11 .byte 102,72,15,110,199 .byte 102,73,15,110,202 .byte 102,73,15,110,211 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_by_2q movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 leaq 64-0(%rsi),%rsi leaq 64(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 0(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 32(%rbx),%rax movq 64+0(%rbx),%r9 movq 64+8(%rbx),%r10 movq 64+16(%rbx),%r11 movq 64+24(%rbx),%r12 leaq 64-0(%rbx),%rsi leaq 32(%rbx),%rbx .byte 
102,72,15,126,215 call __ecp_nistz256_mul_montq call __ecp_nistz256_mul_by_2q movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 32(%rsp),%rdi call __ecp_nistz256_add_toq movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 .byte 102,72,15,126,207 call __ecp_nistz256_sqr_montq xorq %r9,%r9 movq %r12,%rax addq $-1,%r12 movq %r13,%r10 adcq %rsi,%r13 movq %r14,%rcx adcq $0,%r14 movq %r15,%r8 adcq %rbp,%r15 adcq $0,%r9 xorq %rsi,%rsi testq $1,%rax cmovzq %rax,%r12 cmovzq %r10,%r13 cmovzq %rcx,%r14 cmovzq %r8,%r15 cmovzq %rsi,%r9 movq %r13,%rax shrq $1,%r12 shlq $63,%rax movq %r14,%r10 shrq $1,%r13 orq %rax,%r12 shlq $63,%r10 movq %r15,%rcx shrq $1,%r14 orq %r10,%r13 shlq $63,%rcx movq %r12,0(%rdi) shrq $1,%r15 movq %r13,8(%rdi) shlq $63,%r9 orq %rcx,%r14 orq %r9,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) movq 64(%rsp),%rax leaq 64(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2q leaq 32(%rsp),%rbx leaq 32(%rsp),%rdi call __ecp_nistz256_add_toq movq 96(%rsp),%rax leaq 96(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2q movq 0+32(%rsp),%rax movq 8+32(%rsp),%r14 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r15 movq 24+32(%rsp),%r8 .byte 102,72,15,126,199 call __ecp_nistz256_sqr_montq leaq 128(%rsp),%rbx movq %r14,%r8 movq %r15,%r9 movq %rsi,%r14 movq %rbp,%r15 call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 0(%rsp),%rdi call __ecp_nistz256_subq movq 32(%rsp),%rax leaq 32(%rsp),%rbx movq %r12,%r14 xorl %ecx,%ecx movq %r12,0+0(%rsp) movq %r13,%r10 movq %r13,0+8(%rsp) cmovzq %r8,%r11 movq %r8,0+16(%rsp) leaq 0-0(%rsp),%rsi cmovzq %r9,%r12 movq %r9,0+24(%rsp) movq %r14,%r9 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq .byte 102,72,15,126,203 .byte 102,72,15,126,207 call __ecp_nistz256_sub_fromq leaq 160+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$point_doubleq_epilogue: .byte 0xf3,0xc3 .globl _ecp_nistz256_point_add .private_extern _ecp_nistz256_point_add .p2align 5 _ecp_nistz256_point_add: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je L$point_addx #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $576+8,%rsp L$point_addq_body: movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq %rsi,%rbx movq %rdx,%rsi movdqa %xmm0,384(%rsp) movdqa %xmm1,384+16(%rsp) movdqa %xmm2,416(%rsp) movdqa %xmm3,416+16(%rsp) movdqa %xmm4,448(%rsp) movdqa %xmm5,448+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rsi),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 por %xmm3,%xmm5 movdqu 48(%rsi),%xmm3 movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,480(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,480+16(%rsp) 
movdqu 64(%rsi),%xmm0 movdqu 80(%rsi),%xmm1 movdqa %xmm2,512(%rsp) movdqa %xmm3,512+16(%rsp) por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm0,%xmm1 .byte 102,72,15,110,199 leaq 64-0(%rsi),%rsi movq %rax,544+0(%rsp) movq %r14,544+8(%rsp) movq %r15,544+16(%rsp) movq %r8,544+24(%rsp) leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montq pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm1,%xmm4 por %xmm1,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 por %xmm3,%xmm4 pxor %xmm3,%xmm3 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 movq 64+0(%rbx),%rax movq 64+8(%rbx),%r14 movq 64+16(%rbx),%r15 movq 64+24(%rbx),%r8 .byte 102,72,15,110,203 leaq 64-0(%rbx),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 544(%rsp),%rax leaq 544(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq movq 416(%rsp),%rax leaq 416(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq 0+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montq movq 512(%rsp),%rax leaq 512(%rsp),%rbx movq 0+256(%rsp),%r9 movq 8+256(%rsp),%r10 leaq 0+256(%rsp),%rsi movq 16+256(%rsp),%r11 movq 24+256(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 224(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq orq %r13,%r12 movdqa %xmm4,%xmm2 orq %r8,%r12 orq %r9,%r12 por %xmm5,%xmm2 .byte 102,73,15,110,220 movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montq movq 480(%rsp),%rax leaq 480(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 160(%rsp),%rbx leaq 0(%rsp),%rdi call __ecp_nistz256_sub_fromq orq %r13,%r12 orq %r8,%r12 orq %r9,%r12 .byte 102,73,15,126,208 .byte 102,73,15,126,217 orq %r8,%r12 .byte 0x3e jnz L$add_proceedq testq %r9,%r9 jz L$add_doubleq .byte 102,72,15,126,199 pxor %xmm0,%xmm0 movdqu %xmm0,0(%rdi) movdqu %xmm0,16(%rdi) movdqu %xmm0,32(%rdi) movdqu %xmm0,48(%rdi) movdqu %xmm0,64(%rdi) movdqu %xmm0,80(%rdi) jmp L$add_doneq .p2align 5 L$add_doubleq: .byte 102,72,15,126,206 .byte 102,72,15,126,199 addq $416,%rsp jmp L$point_double_shortcutq .p2align 5 L$add_proceedq: movq 0+64(%rsp),%rax movq 8+64(%rsp),%r14 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 544(%rsp),%rax leaq 544(%rsp),%rbx movq 0+352(%rsp),%r9 movq 8+352(%rsp),%r10 leaq 0+352(%rsp),%rsi movq 16+352(%rsp),%r11 movq 24+352(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montq movq 0(%rsp),%rax leaq 0(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 128(%rsp),%rdi call __ecp_nistz256_mul_montq movq 160(%rsp),%rax leaq 160(%rsp),%rbx movq 0+32(%rsp),%r9 
movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montq xorq %r11,%r11 addq %r12,%r12 leaq 96(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subq leaq 128(%rsp),%rbx leaq 288(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 192+0(%rsp),%rax movq 192+8(%rsp),%rbp movq 192+16(%rsp),%rcx movq 192+24(%rsp),%r10 leaq 320(%rsp),%rdi call __ecp_nistz256_subq movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 128(%rsp),%rax leaq 128(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq 0+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq movq 320(%rsp),%rax leaq 320(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 320(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 256(%rsp),%rbx leaq 320(%rsp),%rdi call __ecp_nistz256_sub_fromq .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 352(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 352+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 544(%rsp),%xmm2 pand 544+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 480(%rsp),%xmm2 pand 480+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 320(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 320+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 512(%rsp),%xmm2 pand 512+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) L$add_doneq: leaq 576+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$point_addq_epilogue: .byte 0xf3,0xc3 .globl _ecp_nistz256_point_add_affine .private_extern _ecp_nistz256_point_add_affine .p2align 5 _ecp_nistz256_point_add_affine: _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq _OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je L$point_add_affinex #endif pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $480+8,%rsp L$add_affineq_body: movdqu 0(%rsi),%xmm0 movq %rdx,%rbx movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,320(%rsp) movdqa %xmm1,320+16(%rsp) movdqa %xmm2,352(%rsp) movdqa %xmm3,352+16(%rsp) movdqa 
%xmm4,384(%rsp) movdqa %xmm5,384+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rbx),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rbx),%xmm1 movdqu 32(%rbx),%xmm2 por %xmm3,%xmm5 movdqu 48(%rbx),%xmm3 movdqa %xmm0,416(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,416+16(%rsp) por %xmm0,%xmm1 .byte 102,72,15,110,199 movdqa %xmm2,448(%rsp) movdqa %xmm3,448+16(%rsp) por %xmm2,%xmm3 por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm1,%xmm3 leaq 64-0(%rsi),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm3,%xmm4 movq 0(%rbx),%rax movq %r12,%r9 por %xmm3,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 movq %r13,%r10 por %xmm3,%xmm4 pxor %xmm3,%xmm3 movq %r14,%r11 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 leaq 32-0(%rsp),%rsi movq %r15,%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 320(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 288(%rsp),%rdi call __ecp_nistz256_mul_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 352(%rsp),%rbx leaq 96(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+64(%rsp),%rax movq 8+64(%rsp),%r14 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 128(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 0+96(%rsp),%rax movq 8+96(%rsp),%r14 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r15 movq 24+96(%rsp),%r8 leaq 192(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 128(%rsp),%rax leaq 128(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montq movq 320(%rsp),%rax leaq 320(%rsp),%rbx movq 0+128(%rsp),%r9 movq 8+128(%rsp),%r10 leaq 0+128(%rsp),%rsi movq 16+128(%rsp),%r11 movq 24+128(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq xorq %r11,%r11 addq %r12,%r12 leaq 192(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subq leaq 160(%rsp),%rbx leaq 224(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 64(%rsp),%rdi call __ecp_nistz256_subq movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 352(%rsp),%rax leaq 352(%rsp),%rbx movq 0+160(%rsp),%r9 movq 8+160(%rsp),%r10 leaq 0+160(%rsp),%rsi movq 16+160(%rsp),%r11 movq 24+160(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq movq 96(%rsp),%rax leaq 96(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 64(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 32(%rsp),%rbx leaq 256(%rsp),%rdi call __ecp_nistz256_sub_fromq .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand L$ONE_mont(%rip),%xmm2 pand 
L$ONE_mont+16(%rip),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 224(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 224+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 320(%rsp),%xmm2 pand 320+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 256(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 256+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 352(%rsp),%xmm2 pand 352+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 480+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$add_affineq_epilogue: .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .p2align 5 __ecp_nistz256_add_tox: xorq %r11,%r11 adcq 0(%rbx),%r12 adcq 8(%rbx),%r13 movq %r12,%rax adcq 16(%rbx),%r8 adcq 24(%rbx),%r9 movq %r13,%rbp adcq $0,%r11 xorq %r10,%r10 sbbq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_sub_fromx: xorq %r11,%r11 sbbq 0(%rbx),%r12 sbbq 8(%rbx),%r13 movq %r12,%rax sbbq 16(%rbx),%r8 sbbq 24(%rbx),%r9 movq %r13,%rbp sbbq $0,%r11 xorq %r10,%r10 adcq $-1,%r12 movq %r8,%rcx adcq %r14,%r13 adcq $0,%r8 movq %r9,%r10 adcq %r15,%r9 btq $0,%r11 cmovncq %rax,%r12 cmovncq %rbp,%r13 movq %r12,0(%rdi) cmovncq %rcx,%r8 movq %r13,8(%rdi) cmovncq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_subx: xorq %r11,%r11 sbbq %r12,%rax sbbq %r13,%rbp movq %rax,%r12 sbbq %r8,%rcx sbbq %r9,%r10 movq %rbp,%r13 sbbq $0,%r11 xorq %r9,%r9 adcq $-1,%rax movq %rcx,%r8 adcq %r14,%rbp adcq $0,%rcx movq %r10,%r9 adcq %r15,%r10 btq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 cmovcq %rcx,%r8 cmovcq %r10,%r9 .byte 0xf3,0xc3 .p2align 5 __ecp_nistz256_mul_by_2x: xorq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 xorq %r10,%r10 sbbq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .p2align 5 ecp_nistz256_point_doublex: L$point_doublex: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $160+8,%rsp L$point_doublex_body: L$point_double_shortcutx: movdqu 0(%rsi),%xmm0 movq %rsi,%rbx movdqu 16(%rsi),%xmm1 movq 32+0(%rsi),%r12 movq 32+8(%rsi),%r13 movq 32+16(%rsi),%r8 movq 32+24(%rsi),%r9 movq L$poly+8(%rip),%r14 movq L$poly+24(%rip),%r15 movdqa %xmm0,96(%rsp) movdqa %xmm1,96+16(%rsp) leaq 32(%rdi),%r10 leaq 64(%rdi),%r11 .byte 102,72,15,110,199 .byte 102,73,15,110,202 .byte 
102,73,15,110,211 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_by_2x movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 leaq 64-128(%rsi),%rsi leaq 64(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 0(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 32(%rbx),%rdx movq 64+0(%rbx),%r9 movq 64+8(%rbx),%r10 movq 64+16(%rbx),%r11 movq 64+24(%rbx),%r12 leaq 64-128(%rbx),%rsi leaq 32(%rbx),%rbx .byte 102,72,15,126,215 call __ecp_nistz256_mul_montx call __ecp_nistz256_mul_by_2x movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 32(%rsp),%rdi call __ecp_nistz256_add_tox movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 .byte 102,72,15,126,207 call __ecp_nistz256_sqr_montx xorq %r9,%r9 movq %r12,%rax addq $-1,%r12 movq %r13,%r10 adcq %rsi,%r13 movq %r14,%rcx adcq $0,%r14 movq %r15,%r8 adcq %rbp,%r15 adcq $0,%r9 xorq %rsi,%rsi testq $1,%rax cmovzq %rax,%r12 cmovzq %r10,%r13 cmovzq %rcx,%r14 cmovzq %r8,%r15 cmovzq %rsi,%r9 movq %r13,%rax shrq $1,%r12 shlq $63,%rax movq %r14,%r10 shrq $1,%r13 orq %rax,%r12 shlq $63,%r10 movq %r15,%rcx shrq $1,%r14 orq %r10,%r13 shlq $63,%rcx movq %r12,0(%rdi) shrq $1,%r15 movq %r13,8(%rdi) shlq $63,%r9 orq %rcx,%r14 orq %r9,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) movq 64(%rsp),%rdx leaq 64(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2x leaq 32(%rsp),%rbx leaq 32(%rsp),%rdi call __ecp_nistz256_add_tox movq 96(%rsp),%rdx leaq 96(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2x movq 0+32(%rsp),%rdx movq 8+32(%rsp),%r14 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r15 movq 24+32(%rsp),%r8 .byte 102,72,15,126,199 call __ecp_nistz256_sqr_montx leaq 128(%rsp),%rbx movq %r14,%r8 movq %r15,%r9 movq %rsi,%r14 movq %rbp,%r15 call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 0(%rsp),%rdi call __ecp_nistz256_subx movq 32(%rsp),%rdx leaq 32(%rsp),%rbx movq %r12,%r14 xorl %ecx,%ecx movq %r12,0+0(%rsp) movq %r13,%r10 movq %r13,0+8(%rsp) cmovzq %r8,%r11 movq %r8,0+16(%rsp) leaq 0-128(%rsp),%rsi cmovzq %r9,%r12 movq %r9,0+24(%rsp) movq %r14,%r9 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx .byte 102,72,15,126,203 .byte 102,72,15,126,207 call __ecp_nistz256_sub_fromx leaq 160+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$point_doublex_epilogue: .byte 0xf3,0xc3 .p2align 5 ecp_nistz256_point_addx: L$point_addx: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $576+8,%rsp L$point_addx_body: movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq %rsi,%rbx movq %rdx,%rsi movdqa %xmm0,384(%rsp) movdqa %xmm1,384+16(%rsp) movdqa %xmm2,416(%rsp) movdqa %xmm3,416+16(%rsp) movdqa %xmm4,448(%rsp) movdqa 
%xmm5,448+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rsi),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 por %xmm3,%xmm5 movdqu 48(%rsi),%xmm3 movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,480(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,480+16(%rsp) movdqu 64(%rsi),%xmm0 movdqu 80(%rsi),%xmm1 movdqa %xmm2,512(%rsp) movdqa %xmm3,512+16(%rsp) por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm0,%xmm1 .byte 102,72,15,110,199 leaq 64-128(%rsi),%rsi movq %rdx,544+0(%rsp) movq %r14,544+8(%rsp) movq %r15,544+16(%rsp) movq %r8,544+24(%rsp) leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montx pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm1,%xmm4 por %xmm1,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 por %xmm3,%xmm4 pxor %xmm3,%xmm3 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 movq 64+0(%rbx),%rdx movq 64+8(%rbx),%r14 movq 64+16(%rbx),%r15 movq 64+24(%rbx),%r8 .byte 102,72,15,110,203 leaq 64-128(%rbx),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 544(%rsp),%rdx leaq 544(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq -128+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx movq 416(%rsp),%rdx leaq 416(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq -128+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montx movq 512(%rsp),%rdx leaq 512(%rsp),%rbx movq 0+256(%rsp),%r9 movq 8+256(%rsp),%r10 leaq -128+256(%rsp),%rsi movq 16+256(%rsp),%r11 movq 24+256(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 224(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx orq %r13,%r12 movdqa %xmm4,%xmm2 orq %r8,%r12 orq %r9,%r12 por %xmm5,%xmm2 .byte 102,73,15,110,220 movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq -128+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montx movq 480(%rsp),%rdx leaq 480(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 160(%rsp),%rbx leaq 0(%rsp),%rdi call __ecp_nistz256_sub_fromx orq %r13,%r12 orq %r8,%r12 orq %r9,%r12 .byte 102,73,15,126,208 .byte 102,73,15,126,217 orq %r8,%r12 .byte 0x3e jnz L$add_proceedx testq %r9,%r9 jz L$add_doublex .byte 102,72,15,126,199 pxor %xmm0,%xmm0 movdqu %xmm0,0(%rdi) movdqu %xmm0,16(%rdi) movdqu %xmm0,32(%rdi) movdqu %xmm0,48(%rdi) movdqu %xmm0,64(%rdi) movdqu %xmm0,80(%rdi) jmp L$add_donex .p2align 5 L$add_doublex: .byte 102,72,15,126,206 .byte 102,72,15,126,199 addq $416,%rsp jmp L$point_double_shortcutx .p2align 5 L$add_proceedx: movq 0+64(%rsp),%rdx movq 8+64(%rsp),%r14 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 544(%rsp),%rdx leaq 544(%rsp),%rbx movq 0+352(%rsp),%r9 movq 8+352(%rsp),%r10 leaq 
-128+352(%rsp),%rsi movq 16+352(%rsp),%r11 movq 24+352(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montx movq 0(%rsp),%rdx leaq 0(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 128(%rsp),%rdi call __ecp_nistz256_mul_montx movq 160(%rsp),%rdx leaq 160(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montx xorq %r11,%r11 addq %r12,%r12 leaq 96(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subx leaq 128(%rsp),%rbx leaq 288(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 192+0(%rsp),%rax movq 192+8(%rsp),%rbp movq 192+16(%rsp),%rcx movq 192+24(%rsp),%r10 leaq 320(%rsp),%rdi call __ecp_nistz256_subx movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 128(%rsp),%rdx leaq 128(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq -128+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx movq 320(%rsp),%rdx leaq 320(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 320(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 256(%rsp),%rbx leaq 320(%rsp),%rdi call __ecp_nistz256_sub_fromx .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 352(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 352+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 544(%rsp),%xmm2 pand 544+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 480(%rsp),%xmm2 pand 480+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 320(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 320+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 512(%rsp),%xmm2 pand 512+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) L$add_donex: leaq 576+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$point_addx_epilogue: .byte 0xf3,0xc3 .p2align 5 ecp_nistz256_point_add_affinex: L$point_add_affinex: pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $480+8,%rsp L$add_affinex_body: movdqu 0(%rsi),%xmm0 movq %rdx,%rbx movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 
64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,320(%rsp) movdqa %xmm1,320+16(%rsp) movdqa %xmm2,352(%rsp) movdqa %xmm3,352+16(%rsp) movdqa %xmm4,384(%rsp) movdqa %xmm5,384+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rbx),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rbx),%xmm1 movdqu 32(%rbx),%xmm2 por %xmm3,%xmm5 movdqu 48(%rbx),%xmm3 movdqa %xmm0,416(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,416+16(%rsp) por %xmm0,%xmm1 .byte 102,72,15,110,199 movdqa %xmm2,448(%rsp) movdqa %xmm3,448+16(%rsp) por %xmm2,%xmm3 por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm1,%xmm3 leaq 64-128(%rsi),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm3,%xmm4 movq 0(%rbx),%rdx movq %r12,%r9 por %xmm3,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 movq %r13,%r10 por %xmm3,%xmm4 pxor %xmm3,%xmm3 movq %r14,%r11 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 leaq 32-128(%rsp),%rsi movq %r15,%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 320(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 288(%rsp),%rdi call __ecp_nistz256_mul_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 352(%rsp),%rbx leaq 96(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+64(%rsp),%rdx movq 8+64(%rsp),%r14 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 128(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 0+96(%rsp),%rdx movq 8+96(%rsp),%r14 leaq -128+96(%rsp),%rsi movq 16+96(%rsp),%r15 movq 24+96(%rsp),%r8 leaq 192(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 128(%rsp),%rdx leaq 128(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montx movq 320(%rsp),%rdx leaq 320(%rsp),%rbx movq 0+128(%rsp),%r9 movq 8+128(%rsp),%r10 leaq -128+128(%rsp),%rsi movq 16+128(%rsp),%r11 movq 24+128(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx xorq %r11,%r11 addq %r12,%r12 leaq 192(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subx leaq 160(%rsp),%rbx leaq 224(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 64(%rsp),%rdi call __ecp_nistz256_subx movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 352(%rsp),%rdx leaq 352(%rsp),%rbx movq 0+160(%rsp),%r9 movq 8+160(%rsp),%r10 leaq -128+160(%rsp),%rsi movq 16+160(%rsp),%r11 movq 24+160(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx movq 96(%rsp),%rdx leaq 96(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 64(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 32(%rsp),%rbx leaq 256(%rsp),%rdi call __ecp_nistz256_sub_fromx .byte 
102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand L$ONE_mont(%rip),%xmm2 pand L$ONE_mont+16(%rip),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 224(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 224+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 320(%rsp),%xmm2 pand 320+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 256(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 256+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 352(%rsp),%xmm2 pand 352+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 480+56(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbx movq -8(%rsi),%rbp leaq (%rsi),%rsp L$add_affinex_epilogue: .byte 0xf3,0xc3 #endif #endif
marvin-hansen/iggy-streaming-system
18,843
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/x86_64-mont.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _bn_mul_mont_nohw .private_extern _bn_mul_mont_nohw .p2align 4 _bn_mul_mont_nohw: _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 negq %r9 movq %rsp,%r11 leaq -16(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja L$mul_page_walk jmp L$mul_page_walk_done .p2align 4 L$mul_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja L$mul_page_walk L$mul_page_walk_done: movq %rax,8(%rsp,%r9,8) L$mul_body: movq %rdx,%r12 movq (%r8),%r8 movq (%r12),%rbx movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%r13 leaq 1(%r15),%r15 jmp L$1st_enter .p2align 4 L$1st: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%r13 movq %r10,%r11 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 L$1st_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx leaq 1(%r15),%r15 movq %rdx,%r10 mulq %rbp cmpq %r9,%r15 jne L$1st addq %rax,%r13 movq (%rsi),%rax adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 movq %r10,%r11 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 jmp L$outer .p2align 4 L$outer: movq (%r12,%r14,8),%rbx xorq %r15,%r15 movq %r8,%rbp movq (%rsp),%r10 mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq 8(%rsp),%r10 movq %rdx,%r13 leaq 1(%r15),%r15 jmp L$inner_enter .p2align 4 L$inner: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 L$inner_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 leaq 1(%r15),%r15 mulq %rbp cmpq %r9,%r15 jne L$inner addq %rax,%r13 movq (%rsi),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 cmpq %r9,%r14 jb L$outer xorq %r14,%r14 movq (%rsp),%rax movq %r9,%r15 .p2align 4 L$sub: sbbq (%rcx,%r14,8),%rax movq %rax,(%rdi,%r14,8) movq 8(%rsp,%r14,8),%rax leaq 1(%r14),%r14 decq %r15 jnz L$sub sbbq $0,%rax movq $-1,%rbx xorq %rax,%rbx xorq %r14,%r14 movq %r9,%r15 L$copy: movq (%rdi,%r14,8),%rcx movq (%rsp,%r14,8),%rdx andq %rbx,%rcx andq %rax,%rdx movq %r9,(%rsp,%r14,8) orq %rcx,%rdx movq %rdx,(%rdi,%r14,8) leaq 1(%r14),%r14 subq $1,%r15 jnz L$copy movq 8(%rsp,%r9,8),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mul_epilogue: .byte 0xf3,0xc3 .globl _bn_mul4x_mont .private_extern _bn_mul4x_mont .p2align 4 _bn_mul4x_mont: _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 negq %r9 movq %rsp,%r11 leaq -32(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq 
%r10,%rsp ja L$mul4x_page_walk jmp L$mul4x_page_walk_done L$mul4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja L$mul4x_page_walk L$mul4x_page_walk_done: movq %rax,8(%rsp,%r9,8) L$mul4x_body: movq %rdi,16(%rsp,%r9,8) movq %rdx,%r12 movq (%r8),%r8 movq (%r12),%rbx movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi),%rax adcq $0,%rdx addq %r11,%rdi leaq 4(%r15),%r15 adcq $0,%rdx movq %rdi,(%rsp) movq %rdx,%r13 jmp L$1st4x .p2align 4 L$1st4x: mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq (%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx,%r15,8),%rax adcq $0,%rdx leaq 4(%r15),%r15 movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq -16(%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-32(%rsp,%r15,8) movq %rdx,%r13 cmpq %r9,%r15 jb L$1st4x mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi movq %r13,-8(%rsp,%r15,8) movq %rdi,(%rsp,%r15,8) leaq 1(%r14),%r14 .p2align 2 L$outer4x: movq (%r12,%r14,8),%rbx xorq %r15,%r15 movq (%rsp),%r10 movq %r8,%rbp mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%rsp),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi),%rax adcq $0,%rdx addq %r11,%rdi leaq 4(%r15),%r15 adcq $0,%rdx movq %rdi,(%rsp) movq %rdx,%r13 jmp L$inner4x .p2align 4 L$inner4x: mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx addq -16(%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx addq -8(%rsp,%r15,8),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx,%r15,8),%rax adcq $0,%rdx addq 8(%rsp,%r15,8),%r11 adcq $0,%rdx leaq 4(%r15),%r15 movq %rdx,%r10 
mulq %rbp addq %rax,%rdi movq -16(%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-32(%rsp,%r15,8) movq %rdx,%r13 cmpq %r9,%r15 jb L$inner4x mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx addq -16(%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx addq -8(%rsp,%r15,8),%r11 adcq $0,%rdx leaq 1(%r14),%r14 movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi addq (%rsp,%r9,8),%r13 adcq $0,%rdi movq %r13,-8(%rsp,%r15,8) movq %rdi,(%rsp,%r15,8) cmpq %r9,%r14 jb L$outer4x movq 16(%rsp,%r9,8),%rdi leaq -4(%r9),%r15 movq 0(%rsp),%rax movq 8(%rsp),%rdx shrq $2,%r15 leaq (%rsp),%rsi xorq %r14,%r14 subq 0(%rcx),%rax movq 16(%rsi),%rbx movq 24(%rsi),%rbp sbbq 8(%rcx),%rdx L$sub4x: movq %rax,0(%rdi,%r14,8) movq %rdx,8(%rdi,%r14,8) sbbq 16(%rcx,%r14,8),%rbx movq 32(%rsi,%r14,8),%rax movq 40(%rsi,%r14,8),%rdx sbbq 24(%rcx,%r14,8),%rbp movq %rbx,16(%rdi,%r14,8) movq %rbp,24(%rdi,%r14,8) sbbq 32(%rcx,%r14,8),%rax movq 48(%rsi,%r14,8),%rbx movq 56(%rsi,%r14,8),%rbp sbbq 40(%rcx,%r14,8),%rdx leaq 4(%r14),%r14 decq %r15 jnz L$sub4x movq %rax,0(%rdi,%r14,8) movq 32(%rsi,%r14,8),%rax sbbq 16(%rcx,%r14,8),%rbx movq %rdx,8(%rdi,%r14,8) sbbq 24(%rcx,%r14,8),%rbp movq %rbx,16(%rdi,%r14,8) sbbq $0,%rax movq %rbp,24(%rdi,%r14,8) pxor %xmm0,%xmm0 .byte 102,72,15,110,224 pcmpeqd %xmm5,%xmm5 pshufd $0,%xmm4,%xmm4 movq %r9,%r15 pxor %xmm4,%xmm5 shrq $2,%r15 xorl %eax,%eax jmp L$copy4x .p2align 4 L$copy4x: movdqa (%rsp,%rax,1),%xmm1 movdqu (%rdi,%rax,1),%xmm2 pand %xmm4,%xmm1 pand %xmm5,%xmm2 movdqa 16(%rsp,%rax,1),%xmm3 movdqa %xmm0,(%rsp,%rax,1) por %xmm2,%xmm1 movdqu 16(%rdi,%rax,1),%xmm2 movdqu %xmm1,(%rdi,%rax,1) pand %xmm4,%xmm3 pand %xmm5,%xmm2 movdqa %xmm0,16(%rsp,%rax,1) por %xmm2,%xmm3 movdqu %xmm3,16(%rdi,%rax,1) leaq 32(%rax),%rax decq %r15 jnz L$copy4x movq 8(%rsp,%r9,8),%rsi movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mul4x_epilogue: .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX #endif .globl _bn_sqr8x_mont .private_extern _bn_sqr8x_mont .p2align 5 _bn_sqr8x_mont: _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$sqr8x_prologue: movl %r9d,%r10d shll $3,%r9d shlq $3+2,%r10 negq %r9 leaq -64(%rsp,%r9,2),%r11 movq %rsp,%rbp movq (%r8),%r8 subq %rsi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb L$sqr8x_sp_alt subq %r11,%rbp leaq -64(%rbp,%r9,2),%rbp jmp L$sqr8x_sp_done .p2align 5 L$sqr8x_sp_alt: leaq 4096-64(,%r9,2),%r10 leaq -64(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp L$sqr8x_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$sqr8x_page_walk jmp L$sqr8x_page_walk_done .p2align 4 L$sqr8x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$sqr8x_page_walk L$sqr8x_page_walk_done: movq %r9,%r10 negq %r9 movq %r8,32(%rsp) movq %rax,40(%rsp) L$sqr8x_body: .byte 102,72,15,110,209 pxor %xmm0,%xmm0 .byte 102,72,15,110,207 .byte 102,73,15,110,218 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX testq %rdx,%rdx jz L$sqr8x_nox call _bn_sqrx8x_internal leaq 
(%r8,%rcx,1),%rbx movq %rcx,%r9 movq %rcx,%rdx .byte 102,72,15,126,207 sarq $3+2,%rcx jmp L$sqr8x_sub .p2align 5 L$sqr8x_nox: #endif call _bn_sqr8x_internal leaq (%rdi,%r9,1),%rbx movq %r9,%rcx movq %r9,%rdx .byte 102,72,15,126,207 sarq $3+2,%rcx jmp L$sqr8x_sub .p2align 5 L$sqr8x_sub: movq 0(%rbx),%r12 movq 8(%rbx),%r13 movq 16(%rbx),%r14 movq 24(%rbx),%r15 leaq 32(%rbx),%rbx sbbq 0(%rbp),%r12 sbbq 8(%rbp),%r13 sbbq 16(%rbp),%r14 sbbq 24(%rbp),%r15 leaq 32(%rbp),%rbp movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r14,16(%rdi) movq %r15,24(%rdi) leaq 32(%rdi),%rdi incq %rcx jnz L$sqr8x_sub sbbq $0,%rax leaq (%rbx,%r9,1),%rbx leaq (%rdi,%r9,1),%rdi .byte 102,72,15,110,200 pxor %xmm0,%xmm0 pshufd $0,%xmm1,%xmm1 movq 40(%rsp),%rsi jmp L$sqr8x_cond_copy .p2align 5 L$sqr8x_cond_copy: movdqa 0(%rbx),%xmm2 movdqa 16(%rbx),%xmm3 leaq 32(%rbx),%rbx movdqu 0(%rdi),%xmm4 movdqu 16(%rdi),%xmm5 leaq 32(%rdi),%rdi movdqa %xmm0,-32(%rbx) movdqa %xmm0,-16(%rbx) movdqa %xmm0,-32(%rbx,%rdx,1) movdqa %xmm0,-16(%rbx,%rdx,1) pcmpeqd %xmm1,%xmm0 pand %xmm1,%xmm2 pand %xmm1,%xmm3 pand %xmm0,%xmm4 pand %xmm0,%xmm5 pxor %xmm0,%xmm0 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqu %xmm4,-32(%rdi) movdqu %xmm5,-16(%rdi) addq $32,%r9 jnz L$sqr8x_cond_copy movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$sqr8x_epilogue: .byte 0xf3,0xc3 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .globl _bn_mulx4x_mont .private_extern _bn_mulx4x_mont .p2align 5 _bn_mulx4x_mont: _CET_ENDBR movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 L$mulx4x_prologue: shll $3,%r9d xorq %r10,%r10 subq %r9,%r10 movq (%r8),%r8 leaq -72(%rsp,%r10,1),%rbp andq $-128,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mulx4x_page_walk jmp L$mulx4x_page_walk_done .p2align 4 L$mulx4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja L$mulx4x_page_walk L$mulx4x_page_walk_done: leaq (%rdx,%r9,1),%r10 movq %r9,0(%rsp) shrq $5,%r9 movq %r10,16(%rsp) subq $1,%r9 movq %r8,24(%rsp) movq %rdi,32(%rsp) movq %rax,40(%rsp) movq %r9,48(%rsp) jmp L$mulx4x_body .p2align 5 L$mulx4x_body: leaq 8(%rdx),%rdi movq (%rdx),%rdx leaq 64+32(%rsp),%rbx movq %rdx,%r9 mulxq 0(%rsi),%r8,%rax mulxq 8(%rsi),%r11,%r14 addq %rax,%r11 movq %rdi,8(%rsp) mulxq 16(%rsi),%r12,%r13 adcq %r14,%r12 adcq $0,%r13 movq %r8,%rdi imulq 24(%rsp),%r8 xorq %rbp,%rbp mulxq 24(%rsi),%rax,%r14 movq %r8,%rdx leaq 32(%rsi),%rsi adcxq %rax,%r13 adcxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%rdi adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa1,0x10,0x00,0x00,0x00 movq 48(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) adcxq %rax,%r12 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r12,-16(%rbx) jmp L$mulx4x_1st .p2align 5 L$mulx4x_1st: adcxq %rbp,%r15 mulxq 0(%rsi),%r10,%rax adcxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 .byte 0x67,0x67 movq %r8,%rdx adcxq %rax,%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 movq %r11,-32(%rbx) adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq 
%rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz L$mulx4x_1st movq 0(%rsp),%rax movq 8(%rsp),%rdi adcq %rbp,%r15 addq %r15,%r14 sbbq %r15,%r15 movq %r14,-8(%rbx) jmp L$mulx4x_outer .p2align 5 L$mulx4x_outer: movq (%rdi),%rdx leaq 8(%rdi),%rdi subq %rax,%rsi movq %r15,(%rbx) leaq 64+32(%rsp),%rbx subq %rax,%rcx mulxq 0(%rsi),%r8,%r11 xorl %ebp,%ebp movq %rdx,%r9 mulxq 8(%rsi),%r14,%r12 adoxq -32(%rbx),%r8 adcxq %r14,%r11 mulxq 16(%rsi),%r15,%r13 adoxq -24(%rbx),%r11 adcxq %r15,%r12 adoxq -16(%rbx),%r12 adcxq %rbp,%r13 adoxq %rbp,%r13 movq %rdi,8(%rsp) movq %r8,%r15 imulq 24(%rsp),%r8 xorl %ebp,%ebp mulxq 24(%rsi),%rax,%r14 movq %r8,%rdx adcxq %rax,%r13 adoxq -8(%rbx),%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi adoxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) leaq 32(%rcx),%rcx adcxq %rax,%r12 adoxq %rbp,%r15 movq 48(%rsp),%rdi movq %r12,-16(%rbx) jmp L$mulx4x_inner .p2align 5 L$mulx4x_inner: mulxq 0(%rsi),%r10,%rax adcxq %rbp,%r15 adoxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq 0(%rbx),%r10 adoxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq 8(%rbx),%r11 adoxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 movq %r8,%rdx adcxq 16(%rbx),%r12 adoxq %rax,%r13 adcxq 24(%rbx),%r13 adoxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adcxq %rbp,%r14 adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-32(%rbx) movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz L$mulx4x_inner movq 0(%rsp),%rax movq 8(%rsp),%rdi adcq %rbp,%r15 subq 0(%rbx),%rbp adcq %r15,%r14 sbbq %r15,%r15 movq %r14,-8(%rbx) cmpq 16(%rsp),%rdi jne L$mulx4x_outer leaq 64(%rsp),%rbx subq %rax,%rcx negq %r15 movq %rax,%rdx shrq $3+2,%rax movq 32(%rsp),%rdi jmp L$mulx4x_sub .p2align 5 L$mulx4x_sub: movq 0(%rbx),%r11 movq 8(%rbx),%r12 movq 16(%rbx),%r13 movq 24(%rbx),%r14 leaq 32(%rbx),%rbx sbbq 0(%rcx),%r11 sbbq 8(%rcx),%r12 sbbq 16(%rcx),%r13 sbbq 24(%rcx),%r14 leaq 32(%rcx),%rcx movq %r11,0(%rdi) movq %r12,8(%rdi) movq %r13,16(%rdi) movq %r14,24(%rdi) leaq 32(%rdi),%rdi decq %rax jnz L$mulx4x_sub sbbq $0,%r15 leaq 64(%rsp),%rbx subq %rdx,%rdi .byte 102,73,15,110,207 pxor %xmm0,%xmm0 pshufd $0,%xmm1,%xmm1 movq 40(%rsp),%rsi jmp L$mulx4x_cond_copy .p2align 5 L$mulx4x_cond_copy: movdqa 0(%rbx),%xmm2 movdqa 16(%rbx),%xmm3 leaq 32(%rbx),%rbx movdqu 0(%rdi),%xmm4 movdqu 16(%rdi),%xmm5 leaq 32(%rdi),%rdi movdqa %xmm0,-32(%rbx) movdqa %xmm0,-16(%rbx) pcmpeqd %xmm1,%xmm0 pand %xmm1,%xmm2 pand %xmm1,%xmm3 pand %xmm0,%xmm4 pand %xmm0,%xmm5 pxor %xmm0,%xmm0 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqu %xmm4,-32(%rdi) movdqu %xmm5,-16(%rdi) subq $32,%rdx jnz L$mulx4x_cond_copy movq %rdx,(%rbx) movq $1,%rax movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$mulx4x_epilogue: .byte 0xf3,0xc3 #endif .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .p2align 4 #endif
marvin-hansen/iggy-streaming-system
63,184
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/fipsmodule/aesni-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _aes_hw_encrypt .private_extern _aes_hw_encrypt .p2align 4 _aes_hw_encrypt: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+1(%rip) #endif movups (%rdi),%xmm2 movl 240(%rdx),%eax movups (%rdx),%xmm0 movups 16(%rdx),%xmm1 leaq 32(%rdx),%rdx xorps %xmm0,%xmm2 L$oop_enc1_1: .byte 102,15,56,220,209 decl %eax movups (%rdx),%xmm1 leaq 16(%rdx),%rdx jnz L$oop_enc1_1 .byte 102,15,56,221,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .globl _aes_hw_decrypt .private_extern _aes_hw_decrypt .p2align 4 _aes_hw_decrypt: _CET_ENDBR movups (%rdi),%xmm2 movl 240(%rdx),%eax movups (%rdx),%xmm0 movups 16(%rdx),%xmm1 leaq 32(%rdx),%rdx xorps %xmm0,%xmm2 L$oop_dec1_2: .byte 102,15,56,222,209 decl %eax movups (%rdx),%xmm1 leaq 16(%rdx),%rdx jnz L$oop_dec1_2 .byte 102,15,56,223,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .p2align 4 _aesni_encrypt2: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax L$enc_loop2: .byte 102,15,56,220,209 .byte 102,15,56,220,217 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 movups -16(%rcx,%rax,1),%xmm0 jnz L$enc_loop2 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 0xf3,0xc3 .p2align 4 _aesni_decrypt2: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax L$dec_loop2: .byte 102,15,56,222,209 .byte 102,15,56,222,217 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 movups -16(%rcx,%rax,1),%xmm0 jnz L$dec_loop2 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 0xf3,0xc3 .p2align 4 _aesni_encrypt3: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax L$enc_loop3: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 movups -16(%rcx,%rax,1),%xmm0 jnz L$enc_loop3 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 0xf3,0xc3 .p2align 4 _aesni_decrypt3: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax L$dec_loop3: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 movups -16(%rcx,%rax,1),%xmm0 jnz L$dec_loop3 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 0xf3,0xc3 .p2align 4 _aesni_encrypt4: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 xorps %xmm0,%xmm5 movups 
32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 0x0f,0x1f,0x00 addq $16,%rax L$enc_loop4: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movups -16(%rcx,%rax,1),%xmm0 jnz L$enc_loop4 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 0xf3,0xc3 .p2align 4 _aesni_decrypt4: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 xorps %xmm0,%xmm5 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 0x0f,0x1f,0x00 addq $16,%rax L$dec_loop4: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 movups -16(%rcx,%rax,1),%xmm0 jnz L$dec_loop4 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 0xf3,0xc3 .p2align 4 _aesni_encrypt6: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 pxor %xmm0,%xmm3 pxor %xmm0,%xmm4 .byte 102,15,56,220,209 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,217 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 .byte 102,15,56,220,225 pxor %xmm0,%xmm7 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp L$enc_loop6_enter .p2align 4 L$enc_loop6: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 L$enc_loop6_enter: .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups -16(%rcx,%rax,1),%xmm0 jnz L$enc_loop6 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 102,15,56,221,240 .byte 102,15,56,221,248 .byte 0xf3,0xc3 .p2align 4 _aesni_decrypt6: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 pxor %xmm0,%xmm3 pxor %xmm0,%xmm4 .byte 102,15,56,222,209 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,222,217 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 .byte 102,15,56,222,225 pxor %xmm0,%xmm7 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp L$dec_loop6_enter .p2align 4 L$dec_loop6: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 L$dec_loop6_enter: .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups -16(%rcx,%rax,1),%xmm0 jnz L$dec_loop6 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 102,15,56,223,240 .byte 102,15,56,223,248 .byte 0xf3,0xc3 .p2align 4 _aesni_encrypt8: movups (%rcx),%xmm0 shll $4,%eax 
movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 pxor %xmm0,%xmm4 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,209 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,220,217 pxor %xmm0,%xmm9 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp L$enc_loop8_inner .p2align 4 L$enc_loop8: .byte 102,15,56,220,209 .byte 102,15,56,220,217 L$enc_loop8_inner: .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 L$enc_loop8_enter: movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups -16(%rcx,%rax,1),%xmm0 jnz L$enc_loop8 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 102,15,56,221,240 .byte 102,15,56,221,248 .byte 102,68,15,56,221,192 .byte 102,68,15,56,221,200 .byte 0xf3,0xc3 .p2align 4 _aesni_decrypt8: movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 pxor %xmm0,%xmm4 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,222,209 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,222,217 pxor %xmm0,%xmm9 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp L$dec_loop8_inner .p2align 4 L$dec_loop8: .byte 102,15,56,222,209 .byte 102,15,56,222,217 L$dec_loop8_inner: .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 L$dec_loop8_enter: movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups -16(%rcx,%rax,1),%xmm0 jnz L$dec_loop8 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 102,15,56,223,240 .byte 102,15,56,223,248 .byte 102,68,15,56,223,192 .byte 102,68,15,56,223,200 .byte 0xf3,0xc3 .globl _aes_hw_ecb_encrypt .private_extern _aes_hw_ecb_encrypt .p2align 4 _aes_hw_ecb_encrypt: _CET_ENDBR andq $-16,%rdx jz L$ecb_ret movl 240(%rcx),%eax movups (%rcx),%xmm0 movq %rcx,%r11 movl %eax,%r10d testl %r8d,%r8d jz L$ecb_decrypt cmpq $0x80,%rdx jb L$ecb_enc_tail movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 movdqu 48(%rdi),%xmm5 movdqu 64(%rdi),%xmm6 movdqu 80(%rdi),%xmm7 movdqu 96(%rdi),%xmm8 movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi subq $0x80,%rdx jmp L$ecb_enc_loop8_enter .p2align 4 L$ecb_enc_loop8: movups %xmm2,(%rsi) movq %r11,%rcx movdqu (%rdi),%xmm2 movl %r10d,%eax movups %xmm3,16(%rsi) movdqu 16(%rdi),%xmm3 movups %xmm4,32(%rsi) movdqu 32(%rdi),%xmm4 movups %xmm5,48(%rsi) movdqu 48(%rdi),%xmm5 movups %xmm6,64(%rsi) movdqu 64(%rdi),%xmm6 movups %xmm7,80(%rsi) movdqu 80(%rdi),%xmm7 movups %xmm8,96(%rsi) movdqu 96(%rdi),%xmm8 movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi 
L$ecb_enc_loop8_enter: call _aesni_encrypt8 subq $0x80,%rdx jnc L$ecb_enc_loop8 movups %xmm2,(%rsi) movq %r11,%rcx movups %xmm3,16(%rsi) movl %r10d,%eax movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) movups %xmm8,96(%rsi) movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi addq $0x80,%rdx jz L$ecb_ret L$ecb_enc_tail: movups (%rdi),%xmm2 cmpq $0x20,%rdx jb L$ecb_enc_one movups 16(%rdi),%xmm3 je L$ecb_enc_two movups 32(%rdi),%xmm4 cmpq $0x40,%rdx jb L$ecb_enc_three movups 48(%rdi),%xmm5 je L$ecb_enc_four movups 64(%rdi),%xmm6 cmpq $0x60,%rdx jb L$ecb_enc_five movups 80(%rdi),%xmm7 je L$ecb_enc_six movdqu 96(%rdi),%xmm8 xorps %xmm9,%xmm9 call _aesni_encrypt8 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) movups %xmm8,96(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_one: movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_enc1_3: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_enc1_3 .byte 102,15,56,221,209 movups %xmm2,(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_two: call _aesni_encrypt2 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_three: call _aesni_encrypt3 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_four: call _aesni_encrypt4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_five: xorps %xmm7,%xmm7 call _aesni_encrypt6 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_enc_six: call _aesni_encrypt6 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) jmp L$ecb_ret .p2align 4 L$ecb_decrypt: cmpq $0x80,%rdx jb L$ecb_dec_tail movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 movdqu 48(%rdi),%xmm5 movdqu 64(%rdi),%xmm6 movdqu 80(%rdi),%xmm7 movdqu 96(%rdi),%xmm8 movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi subq $0x80,%rdx jmp L$ecb_dec_loop8_enter .p2align 4 L$ecb_dec_loop8: movups %xmm2,(%rsi) movq %r11,%rcx movdqu (%rdi),%xmm2 movl %r10d,%eax movups %xmm3,16(%rsi) movdqu 16(%rdi),%xmm3 movups %xmm4,32(%rsi) movdqu 32(%rdi),%xmm4 movups %xmm5,48(%rsi) movdqu 48(%rdi),%xmm5 movups %xmm6,64(%rsi) movdqu 64(%rdi),%xmm6 movups %xmm7,80(%rsi) movdqu 80(%rdi),%xmm7 movups %xmm8,96(%rsi) movdqu 96(%rdi),%xmm8 movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi L$ecb_dec_loop8_enter: call _aesni_decrypt8 movups (%r11),%xmm0 subq $0x80,%rdx jnc L$ecb_dec_loop8 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movq %r11,%rcx movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movl %r10d,%eax movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 movups %xmm8,96(%rsi) pxor %xmm8,%xmm8 movups %xmm9,112(%rsi) pxor %xmm9,%xmm9 leaq 128(%rsi),%rsi addq $0x80,%rdx jz L$ecb_ret L$ecb_dec_tail: movups (%rdi),%xmm2 cmpq $0x20,%rdx jb L$ecb_dec_one movups 16(%rdi),%xmm3 je L$ecb_dec_two movups 32(%rdi),%xmm4 cmpq $0x40,%rdx jb L$ecb_dec_three movups 48(%rdi),%xmm5 je L$ecb_dec_four movups 64(%rdi),%xmm6 cmpq $0x60,%rdx jb L$ecb_dec_five movups 80(%rdi),%xmm7 je L$ecb_dec_six movups 96(%rdi),%xmm8 movups (%rcx),%xmm0 xorps %xmm9,%xmm9 call _aesni_decrypt8 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups 
%xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 movups %xmm8,96(%rsi) pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 jmp L$ecb_ret .p2align 4 L$ecb_dec_one: movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_4: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_4 .byte 102,15,56,223,209 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp L$ecb_ret .p2align 4 L$ecb_dec_two: call _aesni_decrypt2 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 jmp L$ecb_ret .p2align 4 L$ecb_dec_three: call _aesni_decrypt3 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 jmp L$ecb_ret .p2align 4 L$ecb_dec_four: call _aesni_decrypt4 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 jmp L$ecb_ret .p2align 4 L$ecb_dec_five: xorps %xmm7,%xmm7 call _aesni_decrypt6 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 jmp L$ecb_ret .p2align 4 L$ecb_dec_six: call _aesni_decrypt6 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 L$ecb_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 .byte 0xf3,0xc3 .globl _aes_hw_ctr32_encrypt_blocks .private_extern _aes_hw_ctr32_encrypt_blocks .p2align 4 _aes_hw_ctr32_encrypt_blocks: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit(%rip) #endif cmpq $1,%rdx jb L$ctr32_epilogue jne L$ctr32_bulk movups (%r8),%xmm2 movups (%rdi),%xmm3 movl 240(%rcx),%edx movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_enc1_5: .byte 102,15,56,220,209 decl %edx movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_enc1_5 .byte 102,15,56,221,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 xorps %xmm3,%xmm2 pxor %xmm3,%xmm3 movups %xmm2,(%rsi) xorps %xmm2,%xmm2 jmp L$ctr32_epilogue .p2align 4 L$ctr32_bulk: leaq (%rsp),%r11 pushq %rbp subq $128,%rsp andq $-16,%rsp movdqu (%r8),%xmm2 movdqu (%rcx),%xmm0 movl 12(%r8),%r8d pxor %xmm0,%xmm2 movl 12(%rcx),%ebp movdqa %xmm2,0(%rsp) bswapl %r8d movdqa %xmm2,%xmm3 movdqa %xmm2,%xmm4 movdqa %xmm2,%xmm5 movdqa %xmm2,64(%rsp) movdqa %xmm2,80(%rsp) movdqa %xmm2,96(%rsp) movq %rdx,%r10 movdqa %xmm2,112(%rsp) leaq 1(%r8),%rax leaq 2(%r8),%rdx bswapl %eax bswapl %edx xorl %ebp,%eax xorl %ebp,%edx .byte 102,15,58,34,216,3 leaq 3(%r8),%rax movdqa %xmm3,16(%rsp) .byte 102,15,58,34,226,3 bswapl %eax movq %r10,%rdx leaq 4(%r8),%r10 movdqa %xmm4,32(%rsp) xorl %ebp,%eax bswapl %r10d .byte 102,15,58,34,232,3 xorl %ebp,%r10d movdqa %xmm5,48(%rsp) leaq 5(%r8),%r9 movl %r10d,64+12(%rsp) bswapl %r9d leaq 6(%r8),%r10 movl 240(%rcx),%eax xorl %ebp,%r9d bswapl %r10d movl %r9d,80+12(%rsp) xorl %ebp,%r10d leaq 7(%r8),%r9 movl %r10d,96+12(%rsp) bswapl %r9d xorl %ebp,%r9d movl %r9d,112+12(%rsp) movups 16(%rcx),%xmm1 movdqa 64(%rsp),%xmm6 movdqa 80(%rsp),%xmm7 cmpq $8,%rdx jb L$ctr32_tail leaq 128(%rcx),%rcx subq $8,%rdx jmp L$ctr32_loop8 .p2align 5 L$ctr32_loop8: addl $8,%r8d movdqa 96(%rsp),%xmm8 .byte 102,15,56,220,209 movl %r8d,%r9d 
movdqa 112(%rsp),%xmm9 .byte 102,15,56,220,217 bswapl %r9d movups 32-128(%rcx),%xmm0 .byte 102,15,56,220,225 xorl %ebp,%r9d nop .byte 102,15,56,220,233 movl %r9d,0+12(%rsp) leaq 1(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 48-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,16+12(%rsp) leaq 2(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 64-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,32+12(%rsp) leaq 3(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 80-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,48+12(%rsp) leaq 4(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 96-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,64+12(%rsp) leaq 5(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 112-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,80+12(%rsp) leaq 6(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 128-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,96+12(%rsp) leaq 7(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 144-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 xorl %ebp,%r9d movdqu 0(%rdi),%xmm10 .byte 102,15,56,220,232 movl %r9d,112+12(%rsp) cmpl $11,%eax .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 160-128(%rcx),%xmm0 jb L$ctr32_enc_done .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 176-128(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 192-128(%rcx),%xmm0 je L$ctr32_enc_done .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 208-128(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 224-128(%rcx),%xmm0 jmp L$ctr32_enc_done .p2align 4 L$ctr32_enc_done: movdqu 16(%rdi),%xmm11 pxor %xmm0,%xmm10 movdqu 32(%rdi),%xmm12 pxor %xmm0,%xmm11 movdqu 
48(%rdi),%xmm13 pxor %xmm0,%xmm12 movdqu 64(%rdi),%xmm14 pxor %xmm0,%xmm13 movdqu 80(%rdi),%xmm15 pxor %xmm0,%xmm14 prefetcht0 448(%rdi) prefetcht0 512(%rdi) pxor %xmm0,%xmm15 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movdqu 96(%rdi),%xmm1 leaq 128(%rdi),%rdi .byte 102,65,15,56,221,210 pxor %xmm0,%xmm1 movdqu 112-128(%rdi),%xmm10 .byte 102,65,15,56,221,219 pxor %xmm0,%xmm10 movdqa 0(%rsp),%xmm11 .byte 102,65,15,56,221,228 .byte 102,65,15,56,221,237 movdqa 16(%rsp),%xmm12 movdqa 32(%rsp),%xmm13 .byte 102,65,15,56,221,246 .byte 102,65,15,56,221,255 movdqa 48(%rsp),%xmm14 movdqa 64(%rsp),%xmm15 .byte 102,68,15,56,221,193 movdqa 80(%rsp),%xmm0 movups 16-128(%rcx),%xmm1 .byte 102,69,15,56,221,202 movups %xmm2,(%rsi) movdqa %xmm11,%xmm2 movups %xmm3,16(%rsi) movdqa %xmm12,%xmm3 movups %xmm4,32(%rsi) movdqa %xmm13,%xmm4 movups %xmm5,48(%rsi) movdqa %xmm14,%xmm5 movups %xmm6,64(%rsi) movdqa %xmm15,%xmm6 movups %xmm7,80(%rsi) movdqa %xmm0,%xmm7 movups %xmm8,96(%rsi) movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi subq $8,%rdx jnc L$ctr32_loop8 addq $8,%rdx jz L$ctr32_done leaq -128(%rcx),%rcx L$ctr32_tail: leaq 16(%rcx),%rcx cmpq $4,%rdx jb L$ctr32_loop3 je L$ctr32_loop4 shll $4,%eax movdqa 96(%rsp),%xmm8 pxor %xmm9,%xmm9 movups 16(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 102,15,56,220,217 leaq 32-16(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,225 addq $16,%rax movups (%rdi),%xmm10 .byte 102,15,56,220,233 .byte 102,15,56,220,241 movups 16(%rdi),%xmm11 movups 32(%rdi),%xmm12 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 call L$enc_loop8_enter movdqu 48(%rdi),%xmm13 pxor %xmm10,%xmm2 movdqu 64(%rdi),%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm10,%xmm6 movdqu %xmm5,48(%rsi) movdqu %xmm6,64(%rsi) cmpq $6,%rdx jb L$ctr32_done movups 80(%rdi),%xmm11 xorps %xmm11,%xmm7 movups %xmm7,80(%rsi) je L$ctr32_done movups 96(%rdi),%xmm12 xorps %xmm12,%xmm8 movups %xmm8,96(%rsi) jmp L$ctr32_done .p2align 5 L$ctr32_loop4: .byte 102,15,56,220,209 leaq 16(%rcx),%rcx decl %eax .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movups (%rcx),%xmm1 jnz L$ctr32_loop4 .byte 102,15,56,221,209 .byte 102,15,56,221,217 movups (%rdi),%xmm10 movups 16(%rdi),%xmm11 .byte 102,15,56,221,225 .byte 102,15,56,221,233 movups 32(%rdi),%xmm12 movups 48(%rdi),%xmm13 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) xorps %xmm11,%xmm3 movups %xmm3,16(%rsi) pxor %xmm12,%xmm4 movdqu %xmm4,32(%rsi) pxor %xmm13,%xmm5 movdqu %xmm5,48(%rsi) jmp L$ctr32_done .p2align 5 L$ctr32_loop3: .byte 102,15,56,220,209 leaq 16(%rcx),%rcx decl %eax .byte 102,15,56,220,217 .byte 102,15,56,220,225 movups (%rcx),%xmm1 jnz L$ctr32_loop3 .byte 102,15,56,221,209 .byte 102,15,56,221,217 .byte 102,15,56,221,225 movups (%rdi),%xmm10 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) cmpq $2,%rdx jb L$ctr32_done movups 16(%rdi),%xmm11 xorps %xmm11,%xmm3 movups %xmm3,16(%rsi) je L$ctr32_done movups 32(%rdi),%xmm12 xorps %xmm12,%xmm4 movups %xmm4,32(%rsi) L$ctr32_done: xorps %xmm0,%xmm0 xorl %ebp,%ebp pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps 
%xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 movaps %xmm0,112(%rsp) pxor %xmm15,%xmm15 movq -8(%r11),%rbp leaq (%r11),%rsp L$ctr32_epilogue: .byte 0xf3,0xc3 .globl _aes_hw_xts_encrypt .private_extern _aes_hw_xts_encrypt .p2align 4 _aes_hw_xts_encrypt: _CET_ENDBR leaq (%rsp),%r11 pushq %rbp subq $112,%rsp andq $-16,%rsp movups (%r9),%xmm2 movl 240(%r8),%eax movl 240(%rcx),%r10d movups (%r8),%xmm0 movups 16(%r8),%xmm1 leaq 32(%r8),%r8 xorps %xmm0,%xmm2 L$oop_enc1_6: .byte 102,15,56,220,209 decl %eax movups (%r8),%xmm1 leaq 16(%r8),%r8 jnz L$oop_enc1_6 .byte 102,15,56,221,209 movups (%rcx),%xmm0 movq %rcx,%rbp movl %r10d,%eax shll $4,%r10d movq %rdx,%r9 andq $-16,%rdx movups 16(%rcx,%r10,1),%xmm1 movdqa L$xts_magic(%rip),%xmm8 movdqa %xmm2,%xmm15 pshufd $0x5f,%xmm2,%xmm9 pxor %xmm0,%xmm1 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm10 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm10 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm11 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm11 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm12 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm12 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm13 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm13 pxor %xmm14,%xmm15 movdqa %xmm15,%xmm14 psrad $31,%xmm9 paddq %xmm15,%xmm15 pand %xmm8,%xmm9 pxor %xmm0,%xmm14 pxor %xmm9,%xmm15 movaps %xmm1,96(%rsp) subq $96,%rdx jc L$xts_enc_short movl $16+96,%eax leaq 32(%rbp,%r10,1),%rcx subq %r10,%rax movups 16(%rbp),%xmm1 movq %rax,%r10 leaq L$xts_magic(%rip),%r8 jmp L$xts_enc_grandloop .p2align 5 L$xts_enc_grandloop: movdqu 0(%rdi),%xmm2 movdqa %xmm0,%xmm8 movdqu 16(%rdi),%xmm3 pxor %xmm10,%xmm2 movdqu 32(%rdi),%xmm4 pxor %xmm11,%xmm3 .byte 102,15,56,220,209 movdqu 48(%rdi),%xmm5 pxor %xmm12,%xmm4 .byte 102,15,56,220,217 movdqu 64(%rdi),%xmm6 pxor %xmm13,%xmm5 .byte 102,15,56,220,225 movdqu 80(%rdi),%xmm7 pxor %xmm15,%xmm8 movdqa 96(%rsp),%xmm9 pxor %xmm14,%xmm6 .byte 102,15,56,220,233 movups 32(%rbp),%xmm0 leaq 96(%rdi),%rdi pxor %xmm8,%xmm7 pxor %xmm9,%xmm10 .byte 102,15,56,220,241 pxor %xmm9,%xmm11 movdqa %xmm10,0(%rsp) .byte 102,15,56,220,249 movups 48(%rbp),%xmm1 pxor %xmm9,%xmm12 .byte 102,15,56,220,208 pxor %xmm9,%xmm13 movdqa %xmm11,16(%rsp) .byte 102,15,56,220,216 pxor %xmm9,%xmm14 movdqa %xmm12,32(%rsp) .byte 102,15,56,220,224 .byte 102,15,56,220,232 pxor %xmm9,%xmm8 movdqa %xmm14,64(%rsp) .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups 64(%rbp),%xmm0 movdqa %xmm8,80(%rsp) pshufd $0x5f,%xmm15,%xmm9 jmp L$xts_enc_loop6 .p2align 5 L$xts_enc_loop6: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups -64(%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups -80(%rcx,%rax,1),%xmm0 jnz L$xts_enc_loop6 movdqa (%r8),%xmm8 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 paddq %xmm15,%xmm15 psrad $31,%xmm14 .byte 102,15,56,220,217 pand %xmm8,%xmm14 movups (%rbp),%xmm10 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 pxor %xmm14,%xmm15 movaps %xmm10,%xmm11 .byte 102,15,56,220,249 movups -64(%rcx),%xmm1 movdqa %xmm9,%xmm14 .byte 102,15,56,220,208 paddd %xmm9,%xmm9 pxor %xmm15,%xmm10 .byte 
102,15,56,220,216 psrad $31,%xmm14 paddq %xmm15,%xmm15 .byte 102,15,56,220,224 .byte 102,15,56,220,232 pand %xmm8,%xmm14 movaps %xmm11,%xmm12 .byte 102,15,56,220,240 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 .byte 102,15,56,220,248 movups -48(%rcx),%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 pxor %xmm15,%xmm11 psrad $31,%xmm14 .byte 102,15,56,220,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movdqa %xmm13,48(%rsp) pxor %xmm14,%xmm15 .byte 102,15,56,220,241 movaps %xmm12,%xmm13 movdqa %xmm9,%xmm14 .byte 102,15,56,220,249 movups -32(%rcx),%xmm1 paddd %xmm9,%xmm9 .byte 102,15,56,220,208 pxor %xmm15,%xmm12 psrad $31,%xmm14 .byte 102,15,56,220,216 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 pxor %xmm14,%xmm15 movaps %xmm13,%xmm14 .byte 102,15,56,220,248 movdqa %xmm9,%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 pxor %xmm15,%xmm13 psrad $31,%xmm0 .byte 102,15,56,220,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm0 .byte 102,15,56,220,225 .byte 102,15,56,220,233 pxor %xmm0,%xmm15 movups (%rbp),%xmm0 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups 16(%rbp),%xmm1 pxor %xmm15,%xmm14 .byte 102,15,56,221,84,36,0 psrad $31,%xmm9 paddq %xmm15,%xmm15 .byte 102,15,56,221,92,36,16 .byte 102,15,56,221,100,36,32 pand %xmm8,%xmm9 movq %r10,%rax .byte 102,15,56,221,108,36,48 .byte 102,15,56,221,116,36,64 .byte 102,15,56,221,124,36,80 pxor %xmm9,%xmm15 leaq 96(%rsi),%rsi movups %xmm2,-96(%rsi) movups %xmm3,-80(%rsi) movups %xmm4,-64(%rsi) movups %xmm5,-48(%rsi) movups %xmm6,-32(%rsi) movups %xmm7,-16(%rsi) subq $96,%rdx jnc L$xts_enc_grandloop movl $16+96,%eax subl %r10d,%eax movq %rbp,%rcx shrl $4,%eax L$xts_enc_short: movl %eax,%r10d pxor %xmm0,%xmm10 addq $96,%rdx jz L$xts_enc_done pxor %xmm0,%xmm11 cmpq $0x20,%rdx jb L$xts_enc_one pxor %xmm0,%xmm12 je L$xts_enc_two pxor %xmm0,%xmm13 cmpq $0x40,%rdx jb L$xts_enc_three pxor %xmm0,%xmm14 je L$xts_enc_four movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 pxor %xmm10,%xmm2 movdqu 48(%rdi),%xmm5 pxor %xmm11,%xmm3 movdqu 64(%rdi),%xmm6 leaq 80(%rdi),%rdi pxor %xmm12,%xmm4 pxor %xmm13,%xmm5 pxor %xmm14,%xmm6 pxor %xmm7,%xmm7 call _aesni_encrypt6 xorps %xmm10,%xmm2 movdqa %xmm15,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movdqu %xmm2,(%rsi) xorps %xmm13,%xmm5 movdqu %xmm3,16(%rsi) xorps %xmm14,%xmm6 movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) movdqu %xmm6,64(%rsi) leaq 80(%rsi),%rsi jmp L$xts_enc_done .p2align 4 L$xts_enc_one: movups (%rdi),%xmm2 leaq 16(%rdi),%rdi xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_enc1_7: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_enc1_7 .byte 102,15,56,221,209 xorps %xmm10,%xmm2 movdqa %xmm11,%xmm10 movups %xmm2,(%rsi) leaq 16(%rsi),%rsi jmp L$xts_enc_done .p2align 4 L$xts_enc_two: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 leaq 32(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 call _aesni_encrypt2 xorps %xmm10,%xmm2 movdqa %xmm12,%xmm10 xorps %xmm11,%xmm3 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) leaq 32(%rsi),%rsi jmp L$xts_enc_done .p2align 4 L$xts_enc_three: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 leaq 48(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 call _aesni_encrypt3 xorps %xmm10,%xmm2 movdqa %xmm13,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) leaq 48(%rsi),%rsi jmp L$xts_enc_done .p2align 4 
L$xts_enc_four: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 xorps %xmm10,%xmm2 movups 48(%rdi),%xmm5 leaq 64(%rdi),%rdi xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 xorps %xmm13,%xmm5 call _aesni_encrypt4 pxor %xmm10,%xmm2 movdqa %xmm14,%xmm10 pxor %xmm11,%xmm3 pxor %xmm12,%xmm4 movdqu %xmm2,(%rsi) pxor %xmm13,%xmm5 movdqu %xmm3,16(%rsi) movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) leaq 64(%rsi),%rsi jmp L$xts_enc_done .p2align 4 L$xts_enc_done: andq $15,%r9 jz L$xts_enc_ret movq %r9,%rdx L$xts_enc_steal: movzbl (%rdi),%eax movzbl -16(%rsi),%ecx leaq 1(%rdi),%rdi movb %al,-16(%rsi) movb %cl,0(%rsi) leaq 1(%rsi),%rsi subq $1,%rdx jnz L$xts_enc_steal subq %r9,%rsi movq %rbp,%rcx movl %r10d,%eax movups -16(%rsi),%xmm2 xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_enc1_8: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_enc1_8 .byte 102,15,56,221,209 xorps %xmm10,%xmm2 movups %xmm2,-16(%rsi) L$xts_enc_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps %xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 pxor %xmm15,%xmm15 movq -8(%r11),%rbp leaq (%r11),%rsp L$xts_enc_epilogue: .byte 0xf3,0xc3 .globl _aes_hw_xts_decrypt .private_extern _aes_hw_xts_decrypt .p2align 4 _aes_hw_xts_decrypt: _CET_ENDBR leaq (%rsp),%r11 pushq %rbp subq $112,%rsp andq $-16,%rsp movups (%r9),%xmm2 movl 240(%r8),%eax movl 240(%rcx),%r10d movups (%r8),%xmm0 movups 16(%r8),%xmm1 leaq 32(%r8),%r8 xorps %xmm0,%xmm2 L$oop_enc1_9: .byte 102,15,56,220,209 decl %eax movups (%r8),%xmm1 leaq 16(%r8),%r8 jnz L$oop_enc1_9 .byte 102,15,56,221,209 xorl %eax,%eax testq $15,%rdx setnz %al shlq $4,%rax subq %rax,%rdx movups (%rcx),%xmm0 movq %rcx,%rbp movl %r10d,%eax shll $4,%r10d movq %rdx,%r9 andq $-16,%rdx movups 16(%rcx,%r10,1),%xmm1 movdqa L$xts_magic(%rip),%xmm8 movdqa %xmm2,%xmm15 pshufd $0x5f,%xmm2,%xmm9 pxor %xmm0,%xmm1 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm10 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm10 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm11 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm11 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm12 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm12 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm13 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm13 pxor %xmm14,%xmm15 movdqa %xmm15,%xmm14 psrad $31,%xmm9 paddq %xmm15,%xmm15 pand %xmm8,%xmm9 pxor %xmm0,%xmm14 pxor %xmm9,%xmm15 movaps %xmm1,96(%rsp) subq $96,%rdx jc L$xts_dec_short movl $16+96,%eax leaq 32(%rbp,%r10,1),%rcx subq %r10,%rax movups 16(%rbp),%xmm1 movq %rax,%r10 leaq L$xts_magic(%rip),%r8 jmp L$xts_dec_grandloop .p2align 5 L$xts_dec_grandloop: movdqu 0(%rdi),%xmm2 movdqa %xmm0,%xmm8 movdqu 16(%rdi),%xmm3 pxor %xmm10,%xmm2 movdqu 32(%rdi),%xmm4 pxor %xmm11,%xmm3 .byte 102,15,56,222,209 movdqu 48(%rdi),%xmm5 pxor %xmm12,%xmm4 .byte 102,15,56,222,217 movdqu 64(%rdi),%xmm6 pxor %xmm13,%xmm5 .byte 102,15,56,222,225 movdqu 80(%rdi),%xmm7 pxor %xmm15,%xmm8 movdqa 96(%rsp),%xmm9 pxor %xmm14,%xmm6 .byte 
102,15,56,222,233 movups 32(%rbp),%xmm0 leaq 96(%rdi),%rdi pxor %xmm8,%xmm7 pxor %xmm9,%xmm10 .byte 102,15,56,222,241 pxor %xmm9,%xmm11 movdqa %xmm10,0(%rsp) .byte 102,15,56,222,249 movups 48(%rbp),%xmm1 pxor %xmm9,%xmm12 .byte 102,15,56,222,208 pxor %xmm9,%xmm13 movdqa %xmm11,16(%rsp) .byte 102,15,56,222,216 pxor %xmm9,%xmm14 movdqa %xmm12,32(%rsp) .byte 102,15,56,222,224 .byte 102,15,56,222,232 pxor %xmm9,%xmm8 movdqa %xmm14,64(%rsp) .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups 64(%rbp),%xmm0 movdqa %xmm8,80(%rsp) pshufd $0x5f,%xmm15,%xmm9 jmp L$xts_dec_loop6 .p2align 5 L$xts_dec_loop6: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups -64(%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups -80(%rcx,%rax,1),%xmm0 jnz L$xts_dec_loop6 movdqa (%r8),%xmm8 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 paddq %xmm15,%xmm15 psrad $31,%xmm14 .byte 102,15,56,222,217 pand %xmm8,%xmm14 movups (%rbp),%xmm10 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 pxor %xmm14,%xmm15 movaps %xmm10,%xmm11 .byte 102,15,56,222,249 movups -64(%rcx),%xmm1 movdqa %xmm9,%xmm14 .byte 102,15,56,222,208 paddd %xmm9,%xmm9 pxor %xmm15,%xmm10 .byte 102,15,56,222,216 psrad $31,%xmm14 paddq %xmm15,%xmm15 .byte 102,15,56,222,224 .byte 102,15,56,222,232 pand %xmm8,%xmm14 movaps %xmm11,%xmm12 .byte 102,15,56,222,240 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 .byte 102,15,56,222,248 movups -48(%rcx),%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 pxor %xmm15,%xmm11 psrad $31,%xmm14 .byte 102,15,56,222,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,222,225 .byte 102,15,56,222,233 movdqa %xmm13,48(%rsp) pxor %xmm14,%xmm15 .byte 102,15,56,222,241 movaps %xmm12,%xmm13 movdqa %xmm9,%xmm14 .byte 102,15,56,222,249 movups -32(%rcx),%xmm1 paddd %xmm9,%xmm9 .byte 102,15,56,222,208 pxor %xmm15,%xmm12 psrad $31,%xmm14 .byte 102,15,56,222,216 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 pxor %xmm14,%xmm15 movaps %xmm13,%xmm14 .byte 102,15,56,222,248 movdqa %xmm9,%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 pxor %xmm15,%xmm13 psrad $31,%xmm0 .byte 102,15,56,222,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm0 .byte 102,15,56,222,225 .byte 102,15,56,222,233 pxor %xmm0,%xmm15 movups (%rbp),%xmm0 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups 16(%rbp),%xmm1 pxor %xmm15,%xmm14 .byte 102,15,56,223,84,36,0 psrad $31,%xmm9 paddq %xmm15,%xmm15 .byte 102,15,56,223,92,36,16 .byte 102,15,56,223,100,36,32 pand %xmm8,%xmm9 movq %r10,%rax .byte 102,15,56,223,108,36,48 .byte 102,15,56,223,116,36,64 .byte 102,15,56,223,124,36,80 pxor %xmm9,%xmm15 leaq 96(%rsi),%rsi movups %xmm2,-96(%rsi) movups %xmm3,-80(%rsi) movups %xmm4,-64(%rsi) movups %xmm5,-48(%rsi) movups %xmm6,-32(%rsi) movups %xmm7,-16(%rsi) subq $96,%rdx jnc L$xts_dec_grandloop movl $16+96,%eax subl %r10d,%eax movq %rbp,%rcx shrl $4,%eax L$xts_dec_short: movl %eax,%r10d pxor %xmm0,%xmm10 pxor %xmm0,%xmm11 addq $96,%rdx jz L$xts_dec_done pxor %xmm0,%xmm12 cmpq $0x20,%rdx jb L$xts_dec_one pxor %xmm0,%xmm13 je L$xts_dec_two pxor %xmm0,%xmm14 cmpq $0x40,%rdx jb L$xts_dec_three je L$xts_dec_four movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 pxor %xmm10,%xmm2 movdqu 48(%rdi),%xmm5 pxor %xmm11,%xmm3 movdqu 64(%rdi),%xmm6 leaq 
80(%rdi),%rdi pxor %xmm12,%xmm4 pxor %xmm13,%xmm5 pxor %xmm14,%xmm6 call _aesni_decrypt6 xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movdqu %xmm2,(%rsi) xorps %xmm13,%xmm5 movdqu %xmm3,16(%rsi) xorps %xmm14,%xmm6 movdqu %xmm4,32(%rsi) pxor %xmm14,%xmm14 movdqu %xmm5,48(%rsi) pcmpgtd %xmm15,%xmm14 movdqu %xmm6,64(%rsi) leaq 80(%rsi),%rsi pshufd $0x13,%xmm14,%xmm11 andq $15,%r9 jz L$xts_dec_ret movdqa %xmm15,%xmm10 paddq %xmm15,%xmm15 pand %xmm8,%xmm11 pxor %xmm15,%xmm11 jmp L$xts_dec_done2 .p2align 4 L$xts_dec_one: movups (%rdi),%xmm2 leaq 16(%rdi),%rdi xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_10: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_10 .byte 102,15,56,223,209 xorps %xmm10,%xmm2 movdqa %xmm11,%xmm10 movups %xmm2,(%rsi) movdqa %xmm12,%xmm11 leaq 16(%rsi),%rsi jmp L$xts_dec_done .p2align 4 L$xts_dec_two: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 leaq 32(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 call _aesni_decrypt2 xorps %xmm10,%xmm2 movdqa %xmm12,%xmm10 xorps %xmm11,%xmm3 movdqa %xmm13,%xmm11 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) leaq 32(%rsi),%rsi jmp L$xts_dec_done .p2align 4 L$xts_dec_three: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 leaq 48(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 call _aesni_decrypt3 xorps %xmm10,%xmm2 movdqa %xmm13,%xmm10 xorps %xmm11,%xmm3 movdqa %xmm14,%xmm11 xorps %xmm12,%xmm4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) leaq 48(%rsi),%rsi jmp L$xts_dec_done .p2align 4 L$xts_dec_four: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 xorps %xmm10,%xmm2 movups 48(%rdi),%xmm5 leaq 64(%rdi),%rdi xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 xorps %xmm13,%xmm5 call _aesni_decrypt4 pxor %xmm10,%xmm2 movdqa %xmm14,%xmm10 pxor %xmm11,%xmm3 movdqa %xmm15,%xmm11 pxor %xmm12,%xmm4 movdqu %xmm2,(%rsi) pxor %xmm13,%xmm5 movdqu %xmm3,16(%rsi) movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) leaq 64(%rsi),%rsi jmp L$xts_dec_done .p2align 4 L$xts_dec_done: andq $15,%r9 jz L$xts_dec_ret L$xts_dec_done2: movq %r9,%rdx movq %rbp,%rcx movl %r10d,%eax movups (%rdi),%xmm2 xorps %xmm11,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_11: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_11 .byte 102,15,56,223,209 xorps %xmm11,%xmm2 movups %xmm2,(%rsi) L$xts_dec_steal: movzbl 16(%rdi),%eax movzbl (%rsi),%ecx leaq 1(%rdi),%rdi movb %al,(%rsi) movb %cl,16(%rsi) leaq 1(%rsi),%rsi subq $1,%rdx jnz L$xts_dec_steal subq %r9,%rsi movq %rbp,%rcx movl %r10d,%eax movups (%rsi),%xmm2 xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_12: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_12 .byte 102,15,56,223,209 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) L$xts_dec_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps %xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 pxor %xmm15,%xmm15 movq -8(%r11),%rbp leaq (%r11),%rsp L$xts_dec_epilogue: .byte 0xf3,0xc3 .globl _aes_hw_cbc_encrypt .private_extern 
_aes_hw_cbc_encrypt .p2align 4 _aes_hw_cbc_encrypt: _CET_ENDBR testq %rdx,%rdx jz L$cbc_ret movl 240(%rcx),%r10d movq %rcx,%r11 testl %r9d,%r9d jz L$cbc_decrypt movups (%r8),%xmm2 movl %r10d,%eax cmpq $16,%rdx jb L$cbc_enc_tail subq $16,%rdx jmp L$cbc_enc_loop .p2align 4 L$cbc_enc_loop: movups (%rdi),%xmm3 leaq 16(%rdi),%rdi movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 xorps %xmm0,%xmm3 leaq 32(%rcx),%rcx xorps %xmm3,%xmm2 L$oop_enc1_13: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_enc1_13 .byte 102,15,56,221,209 movl %r10d,%eax movq %r11,%rcx movups %xmm2,0(%rsi) leaq 16(%rsi),%rsi subq $16,%rdx jnc L$cbc_enc_loop addq $16,%rdx jnz L$cbc_enc_tail pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%r8) pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 jmp L$cbc_ret L$cbc_enc_tail: movq %rdx,%rcx xchgq %rdi,%rsi .long 0x9066A4F3 movl $16,%ecx subq %rdx,%rcx xorl %eax,%eax .long 0x9066AAF3 leaq -16(%rdi),%rdi movl %r10d,%eax movq %rdi,%rsi movq %r11,%rcx xorq %rdx,%rdx jmp L$cbc_enc_loop .p2align 4 L$cbc_decrypt: cmpq $16,%rdx jne L$cbc_decrypt_bulk movdqu (%rdi),%xmm2 movdqu (%r8),%xmm3 movdqa %xmm2,%xmm4 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_14: .byte 102,15,56,222,209 decl %r10d movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_14 .byte 102,15,56,223,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movdqu %xmm4,(%r8) xorps %xmm3,%xmm2 pxor %xmm3,%xmm3 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp L$cbc_ret .p2align 4 L$cbc_decrypt_bulk: leaq (%rsp),%r11 pushq %rbp subq $16,%rsp andq $-16,%rsp movq %rcx,%rbp movups (%r8),%xmm10 movl %r10d,%eax cmpq $0x50,%rdx jbe L$cbc_dec_tail movups (%rcx),%xmm0 movdqu 0(%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqa %xmm2,%xmm11 movdqu 32(%rdi),%xmm4 movdqa %xmm3,%xmm12 movdqu 48(%rdi),%xmm5 movdqa %xmm4,%xmm13 movdqu 64(%rdi),%xmm6 movdqa %xmm5,%xmm14 movdqu 80(%rdi),%xmm7 movdqa %xmm6,%xmm15 cmpq $0x70,%rdx jbe L$cbc_dec_six_or_seven subq $0x70,%rdx leaq 112(%rcx),%rcx jmp L$cbc_dec_loop8_enter .p2align 4 L$cbc_dec_loop8: movups %xmm9,(%rsi) leaq 16(%rsi),%rsi L$cbc_dec_loop8_enter: movdqu 96(%rdi),%xmm8 pxor %xmm0,%xmm2 movdqu 112(%rdi),%xmm9 pxor %xmm0,%xmm3 movups 16-112(%rcx),%xmm1 pxor %xmm0,%xmm4 movq $-1,%rbp cmpq $0x70,%rdx pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,222,209 pxor %xmm0,%xmm9 movups 32-112(%rcx),%xmm0 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 adcq $0,%rbp andq $128,%rbp .byte 102,68,15,56,222,201 addq %rdi,%rbp movups 48-112(%rcx),%xmm1 .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 64-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 80-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 96-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 112-112(%rcx),%xmm1 nop 
.byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 128-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 144-112(%rcx),%xmm1 cmpl $11,%eax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 160-112(%rcx),%xmm0 jb L$cbc_dec_done .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 176-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 192-112(%rcx),%xmm0 je L$cbc_dec_done .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 208-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 224-112(%rcx),%xmm0 jmp L$cbc_dec_done .p2align 4 L$cbc_dec_done: .byte 102,15,56,222,209 .byte 102,15,56,222,217 pxor %xmm0,%xmm10 pxor %xmm0,%xmm11 .byte 102,15,56,222,225 .byte 102,15,56,222,233 pxor %xmm0,%xmm12 pxor %xmm0,%xmm13 .byte 102,15,56,222,241 .byte 102,15,56,222,249 pxor %xmm0,%xmm14 pxor %xmm0,%xmm15 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movdqu 80(%rdi),%xmm1 .byte 102,65,15,56,223,210 movdqu 96(%rdi),%xmm10 pxor %xmm0,%xmm1 .byte 102,65,15,56,223,219 pxor %xmm0,%xmm10 movdqu 112(%rdi),%xmm0 .byte 102,65,15,56,223,228 leaq 128(%rdi),%rdi movdqu 0(%rbp),%xmm11 .byte 102,65,15,56,223,237 .byte 102,65,15,56,223,246 movdqu 16(%rbp),%xmm12 movdqu 32(%rbp),%xmm13 .byte 102,65,15,56,223,255 .byte 102,68,15,56,223,193 movdqu 48(%rbp),%xmm14 movdqu 64(%rbp),%xmm15 .byte 102,69,15,56,223,202 movdqa %xmm0,%xmm10 movdqu 80(%rbp),%xmm1 movups -112(%rcx),%xmm0 movups %xmm2,(%rsi) movdqa %xmm11,%xmm2 movups %xmm3,16(%rsi) movdqa %xmm12,%xmm3 movups %xmm4,32(%rsi) movdqa %xmm13,%xmm4 movups %xmm5,48(%rsi) movdqa %xmm14,%xmm5 movups %xmm6,64(%rsi) movdqa %xmm15,%xmm6 movups %xmm7,80(%rsi) movdqa %xmm1,%xmm7 movups %xmm8,96(%rsi) leaq 112(%rsi),%rsi subq $0x80,%rdx ja L$cbc_dec_loop8 movaps %xmm9,%xmm2 leaq -112(%rcx),%rcx addq $0x70,%rdx jle L$cbc_dec_clear_tail_collected movups %xmm9,(%rsi) leaq 16(%rsi),%rsi cmpq $0x50,%rdx jbe L$cbc_dec_tail movaps %xmm11,%xmm2 L$cbc_dec_six_or_seven: cmpq $0x60,%rdx ja L$cbc_dec_seven movaps %xmm7,%xmm8 call _aesni_decrypt6 pxor %xmm10,%xmm2 movaps %xmm8,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 pxor %xmm15,%xmm7 movdqu %xmm6,64(%rsi) pxor %xmm6,%xmm6 leaq 80(%rsi),%rsi movdqa %xmm7,%xmm2 pxor %xmm7,%xmm7 jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_seven: movups 
96(%rdi),%xmm8 xorps %xmm9,%xmm9 call _aesni_decrypt8 movups 80(%rdi),%xmm9 pxor %xmm10,%xmm2 movups 96(%rdi),%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 pxor %xmm15,%xmm7 movdqu %xmm6,64(%rsi) pxor %xmm6,%xmm6 pxor %xmm9,%xmm8 movdqu %xmm7,80(%rsi) pxor %xmm7,%xmm7 leaq 96(%rsi),%rsi movdqa %xmm8,%xmm2 pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 jmp L$cbc_dec_tail_collected L$cbc_dec_tail: movups (%rdi),%xmm2 subq $0x10,%rdx jbe L$cbc_dec_one movups 16(%rdi),%xmm3 movaps %xmm2,%xmm11 subq $0x10,%rdx jbe L$cbc_dec_two movups 32(%rdi),%xmm4 movaps %xmm3,%xmm12 subq $0x10,%rdx jbe L$cbc_dec_three movups 48(%rdi),%xmm5 movaps %xmm4,%xmm13 subq $0x10,%rdx jbe L$cbc_dec_four movups 64(%rdi),%xmm6 movaps %xmm5,%xmm14 movaps %xmm6,%xmm15 xorps %xmm7,%xmm7 call _aesni_decrypt6 pxor %xmm10,%xmm2 movaps %xmm15,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 leaq 64(%rsi),%rsi movdqa %xmm6,%xmm2 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 subq $0x10,%rdx jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_one: movaps %xmm2,%xmm11 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 L$oop_dec1_15: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz L$oop_dec1_15 .byte 102,15,56,223,209 xorps %xmm10,%xmm2 movaps %xmm11,%xmm10 jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_two: movaps %xmm3,%xmm12 call _aesni_decrypt2 pxor %xmm10,%xmm2 movaps %xmm12,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) movdqa %xmm3,%xmm2 pxor %xmm3,%xmm3 leaq 16(%rsi),%rsi jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_three: movaps %xmm4,%xmm13 call _aesni_decrypt3 pxor %xmm10,%xmm2 movaps %xmm13,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 movdqa %xmm4,%xmm2 pxor %xmm4,%xmm4 leaq 32(%rsi),%rsi jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_four: movaps %xmm5,%xmm14 call _aesni_decrypt4 pxor %xmm10,%xmm2 movaps %xmm14,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 movdqa %xmm5,%xmm2 pxor %xmm5,%xmm5 leaq 48(%rsi),%rsi jmp L$cbc_dec_tail_collected .p2align 4 L$cbc_dec_clear_tail_collected: pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 L$cbc_dec_tail_collected: movups %xmm10,(%r8) andq $15,%rdx jnz L$cbc_dec_tail_partial movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp L$cbc_dec_ret .p2align 4 L$cbc_dec_tail_partial: movaps %xmm2,(%rsp) pxor %xmm2,%xmm2 movq $16,%rcx movq %rsi,%rdi subq %rdx,%rcx leaq (%rsp),%rsi .long 0x9066A4F3 movdqa %xmm2,(%rsp) L$cbc_dec_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 movq -8(%r11),%rbp leaq (%r11),%rsp L$cbc_ret: .byte 0xf3,0xc3 .globl _aes_hw_set_decrypt_key .private_extern _aes_hw_set_decrypt_key .p2align 4 _aes_hw_set_decrypt_key: _CET_ENDBR .byte 0x48,0x83,0xEC,0x08 call __aesni_set_encrypt_key shll $4,%esi testl %eax,%eax jnz L$dec_key_ret leaq 16(%rdx,%rsi,1),%rdi movups (%rdx),%xmm0 movups (%rdi),%xmm1 movups %xmm0,(%rdi) movups %xmm1,(%rdx) leaq 16(%rdx),%rdx leaq -16(%rdi),%rdi L$dec_key_inverse: movups (%rdx),%xmm0 movups (%rdi),%xmm1 .byte 102,15,56,219,192 .byte 102,15,56,219,201 leaq 
16(%rdx),%rdx leaq -16(%rdi),%rdi movups %xmm0,16(%rdi) movups %xmm1,-16(%rdx) cmpq %rdx,%rdi ja L$dec_key_inverse movups (%rdx),%xmm0 .byte 102,15,56,219,192 pxor %xmm1,%xmm1 movups %xmm0,(%rdi) pxor %xmm0,%xmm0 L$dec_key_ret: addq $8,%rsp .byte 0xf3,0xc3 L$SEH_end_set_decrypt_key: .globl _aes_hw_set_encrypt_key .private_extern _aes_hw_set_encrypt_key .p2align 4 _aes_hw_set_encrypt_key: __aesni_set_encrypt_key: _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,_BORINGSSL_function_hit+3(%rip) #endif .byte 0x48,0x83,0xEC,0x08 movq $-1,%rax testq %rdi,%rdi jz L$enc_key_ret testq %rdx,%rdx jz L$enc_key_ret movups (%rdi),%xmm0 xorps %xmm4,%xmm4 leaq _OPENSSL_ia32cap_P(%rip),%r10 movl 4(%r10),%r10d andl $268437504,%r10d leaq 16(%rdx),%rax cmpl $256,%esi je L$14rounds cmpl $192,%esi je L$12rounds cmpl $128,%esi jne L$bad_keybits L$10rounds: movl $9,%esi cmpl $268435456,%r10d je L$10rounds_alt movups %xmm0,(%rdx) .byte 102,15,58,223,200,1 call L$key_expansion_128_cold .byte 102,15,58,223,200,2 call L$key_expansion_128 .byte 102,15,58,223,200,4 call L$key_expansion_128 .byte 102,15,58,223,200,8 call L$key_expansion_128 .byte 102,15,58,223,200,16 call L$key_expansion_128 .byte 102,15,58,223,200,32 call L$key_expansion_128 .byte 102,15,58,223,200,64 call L$key_expansion_128 .byte 102,15,58,223,200,128 call L$key_expansion_128 .byte 102,15,58,223,200,27 call L$key_expansion_128 .byte 102,15,58,223,200,54 call L$key_expansion_128 movups %xmm0,(%rax) movl %esi,80(%rax) xorl %eax,%eax jmp L$enc_key_ret .p2align 4 L$10rounds_alt: movdqa L$key_rotate(%rip),%xmm5 movl $8,%r10d movdqa L$key_rcon1(%rip),%xmm4 movdqa %xmm0,%xmm2 movdqu %xmm0,(%rdx) jmp L$oop_key128 .p2align 4 L$oop_key128: .byte 102,15,56,0,197 .byte 102,15,56,221,196 pslld $1,%xmm4 leaq 16(%rax),%rax movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,-16(%rax) movdqa %xmm0,%xmm2 decl %r10d jnz L$oop_key128 movdqa L$key_rcon1b(%rip),%xmm4 .byte 102,15,56,0,197 .byte 102,15,56,221,196 pslld $1,%xmm4 movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,(%rax) movdqa %xmm0,%xmm2 .byte 102,15,56,0,197 .byte 102,15,56,221,196 movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,16(%rax) movl %esi,96(%rax) xorl %eax,%eax jmp L$enc_key_ret .p2align 4 L$12rounds: movq 16(%rdi),%xmm2 movl $11,%esi cmpl $268435456,%r10d je L$12rounds_alt movups %xmm0,(%rdx) .byte 102,15,58,223,202,1 call L$key_expansion_192a_cold .byte 102,15,58,223,202,2 call L$key_expansion_192b .byte 102,15,58,223,202,4 call L$key_expansion_192a .byte 102,15,58,223,202,8 call L$key_expansion_192b .byte 102,15,58,223,202,16 call L$key_expansion_192a .byte 102,15,58,223,202,32 call L$key_expansion_192b .byte 102,15,58,223,202,64 call L$key_expansion_192a .byte 102,15,58,223,202,128 call L$key_expansion_192b movups %xmm0,(%rax) movl %esi,48(%rax) xorq %rax,%rax jmp L$enc_key_ret .p2align 4 L$12rounds_alt: movdqa L$key_rotate192(%rip),%xmm5 movdqa L$key_rcon1(%rip),%xmm4 movl $8,%r10d movdqu %xmm0,(%rdx) jmp L$oop_key192 .p2align 4 L$oop_key192: movq %xmm2,0(%rax) movdqa %xmm2,%xmm1 .byte 102,15,56,0,213 .byte 102,15,56,221,212 pslld $1,%xmm4 leaq 24(%rax),%rax movdqa %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm3,%xmm0 pshufd $0xff,%xmm0,%xmm3 
pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pxor %xmm2,%xmm0 pxor %xmm3,%xmm2 movdqu %xmm0,-16(%rax) decl %r10d jnz L$oop_key192 movl %esi,32(%rax) xorl %eax,%eax jmp L$enc_key_ret .p2align 4 L$14rounds: movups 16(%rdi),%xmm2 movl $13,%esi leaq 16(%rax),%rax cmpl $268435456,%r10d je L$14rounds_alt movups %xmm0,(%rdx) movups %xmm2,16(%rdx) .byte 102,15,58,223,202,1 call L$key_expansion_256a_cold .byte 102,15,58,223,200,1 call L$key_expansion_256b .byte 102,15,58,223,202,2 call L$key_expansion_256a .byte 102,15,58,223,200,2 call L$key_expansion_256b .byte 102,15,58,223,202,4 call L$key_expansion_256a .byte 102,15,58,223,200,4 call L$key_expansion_256b .byte 102,15,58,223,202,8 call L$key_expansion_256a .byte 102,15,58,223,200,8 call L$key_expansion_256b .byte 102,15,58,223,202,16 call L$key_expansion_256a .byte 102,15,58,223,200,16 call L$key_expansion_256b .byte 102,15,58,223,202,32 call L$key_expansion_256a .byte 102,15,58,223,200,32 call L$key_expansion_256b .byte 102,15,58,223,202,64 call L$key_expansion_256a movups %xmm0,(%rax) movl %esi,16(%rax) xorq %rax,%rax jmp L$enc_key_ret .p2align 4 L$14rounds_alt: movdqa L$key_rotate(%rip),%xmm5 movdqa L$key_rcon1(%rip),%xmm4 movl $7,%r10d movdqu %xmm0,0(%rdx) movdqa %xmm2,%xmm1 movdqu %xmm2,16(%rdx) jmp L$oop_key256 .p2align 4 L$oop_key256: .byte 102,15,56,0,213 .byte 102,15,56,221,212 movdqa %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm3,%xmm0 pslld $1,%xmm4 pxor %xmm2,%xmm0 movdqu %xmm0,(%rax) decl %r10d jz L$done_key256 pshufd $0xff,%xmm0,%xmm2 pxor %xmm3,%xmm3 .byte 102,15,56,221,211 movdqa %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm3,%xmm1 pxor %xmm1,%xmm2 movdqu %xmm2,16(%rax) leaq 32(%rax),%rax movdqa %xmm2,%xmm1 jmp L$oop_key256 L$done_key256: movl %esi,16(%rax) xorl %eax,%eax jmp L$enc_key_ret .p2align 4 L$bad_keybits: movq $-2,%rax L$enc_key_ret: pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 addq $8,%rsp .byte 0xf3,0xc3 L$SEH_end_set_encrypt_key: .p2align 4 L$key_expansion_128: movups %xmm0,(%rax) leaq 16(%rax),%rax L$key_expansion_128_cold: shufps $16,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $255,%xmm1,%xmm1 xorps %xmm1,%xmm0 .byte 0xf3,0xc3 .p2align 4 L$key_expansion_192a: movups %xmm0,(%rax) leaq 16(%rax),%rax L$key_expansion_192a_cold: movaps %xmm2,%xmm5 L$key_expansion_192b_warm: shufps $16,%xmm0,%xmm4 movdqa %xmm2,%xmm3 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 pslldq $4,%xmm3 xorps %xmm4,%xmm0 pshufd $85,%xmm1,%xmm1 pxor %xmm3,%xmm2 pxor %xmm1,%xmm0 pshufd $255,%xmm0,%xmm3 pxor %xmm3,%xmm2 .byte 0xf3,0xc3 .p2align 4 L$key_expansion_192b: movaps %xmm0,%xmm3 shufps $68,%xmm0,%xmm5 movups %xmm5,(%rax) shufps $78,%xmm2,%xmm3 movups %xmm3,16(%rax) leaq 32(%rax),%rax jmp L$key_expansion_192b_warm .p2align 4 L$key_expansion_256a: movups %xmm2,(%rax) leaq 16(%rax),%rax L$key_expansion_256a_cold: shufps $16,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $255,%xmm1,%xmm1 xorps %xmm1,%xmm0 .byte 0xf3,0xc3 .p2align 4 L$key_expansion_256b: movups %xmm0,(%rax) leaq 16(%rax),%rax shufps $16,%xmm2,%xmm4 xorps %xmm4,%xmm2 shufps $140,%xmm2,%xmm4 xorps %xmm4,%xmm2 shufps $170,%xmm1,%xmm1 xorps %xmm1,%xmm2 .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 L$bswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 L$increment32: .long 6,6,6,0 L$increment64: .long 1,0,0,0 L$xts_magic: .long 0x87,0,1,0 
L$increment1:
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
L$key_rotate:
.long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
L$key_rotate192:
.long 0x04070605,0x04070605,0x04070605,0x04070605
L$key_rcon1:
.long 1,1,1,1
L$key_rcon1b:
.long 0x1b,0x1b,0x1b,0x1b
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6
.text
#endif
marvin-hansen/iggy-streaming-system
32,652
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/chacha/chacha-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .section __DATA,__const .p2align 6 L$zero: .long 0,0,0,0 L$one: .long 1,0,0,0 L$inc: .long 0,1,2,3 L$four: .long 4,4,4,4 L$incy: .long 0,2,4,6,1,3,5,7 L$eight: .long 8,8,8,8,8,8,8,8 L$rot16: .byte 0x2,0x3,0x0,0x1, 0x6,0x7,0x4,0x5, 0xa,0xb,0x8,0x9, 0xe,0xf,0xc,0xd L$rot24: .byte 0x3,0x0,0x1,0x2, 0x7,0x4,0x5,0x6, 0xb,0x8,0x9,0xa, 0xf,0xc,0xd,0xe L$sigma: .byte 101,120,112,97,110,100,32,51,50,45,98,121,116,101,32,107,0 .p2align 6 L$zeroz: .long 0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0 L$fourz: .long 4,0,0,0, 4,0,0,0, 4,0,0,0, 4,0,0,0 L$incz: .long 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15 L$sixteen: .long 16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16 .byte 67,104,97,67,104,97,50,48,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl _ChaCha20_ctr32_nohw .private_extern _ChaCha20_ctr32_nohw .p2align 6 _ChaCha20_ctr32_nohw: _CET_ENDBR pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $64+24,%rsp L$ctr32_body: movdqu (%rcx),%xmm1 movdqu 16(%rcx),%xmm2 movdqu (%r8),%xmm3 movdqa L$one(%rip),%xmm4 movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) movq %rdx,%rbp jmp L$oop_outer .p2align 5 L$oop_outer: movl $0x61707865,%eax movl $0x3320646e,%ebx movl $0x79622d32,%ecx movl $0x6b206574,%edx movl 16(%rsp),%r8d movl 20(%rsp),%r9d movl 24(%rsp),%r10d movl 28(%rsp),%r11d movd %xmm3,%r12d movl 52(%rsp),%r13d movl 56(%rsp),%r14d movl 60(%rsp),%r15d movq %rbp,64+0(%rsp) movl $10,%ebp movq %rsi,64+8(%rsp) .byte 102,72,15,126,214 movq %rdi,64+16(%rsp) movq %rsi,%rdi shrq $32,%rdi jmp L$oop .p2align 5 L$oop: addl %r8d,%eax xorl %eax,%r12d roll $16,%r12d addl %r9d,%ebx xorl %ebx,%r13d roll $16,%r13d addl %r12d,%esi xorl %esi,%r8d roll $12,%r8d addl %r13d,%edi xorl %edi,%r9d roll $12,%r9d addl %r8d,%eax xorl %eax,%r12d roll $8,%r12d addl %r9d,%ebx xorl %ebx,%r13d roll $8,%r13d addl %r12d,%esi xorl %esi,%r8d roll $7,%r8d addl %r13d,%edi xorl %edi,%r9d roll $7,%r9d movl %esi,32(%rsp) movl %edi,36(%rsp) movl 40(%rsp),%esi movl 44(%rsp),%edi addl %r10d,%ecx xorl %ecx,%r14d roll $16,%r14d addl %r11d,%edx xorl %edx,%r15d roll $16,%r15d addl %r14d,%esi xorl %esi,%r10d roll $12,%r10d addl %r15d,%edi xorl %edi,%r11d roll $12,%r11d addl %r10d,%ecx xorl %ecx,%r14d roll $8,%r14d addl %r11d,%edx xorl %edx,%r15d roll $8,%r15d addl %r14d,%esi xorl %esi,%r10d roll $7,%r10d addl %r15d,%edi xorl %edi,%r11d roll $7,%r11d addl %r9d,%eax xorl %eax,%r15d roll $16,%r15d addl %r10d,%ebx xorl %ebx,%r12d roll $16,%r12d addl %r15d,%esi xorl %esi,%r9d roll $12,%r9d addl %r12d,%edi xorl %edi,%r10d roll $12,%r10d addl %r9d,%eax xorl %eax,%r15d roll $8,%r15d addl %r10d,%ebx xorl %ebx,%r12d roll $8,%r12d addl %r15d,%esi xorl %esi,%r9d roll $7,%r9d addl %r12d,%edi xorl %edi,%r10d roll $7,%r10d movl %esi,40(%rsp) movl %edi,44(%rsp) movl 32(%rsp),%esi movl 36(%rsp),%edi addl %r11d,%ecx xorl %ecx,%r13d roll $16,%r13d addl %r8d,%edx xorl %edx,%r14d roll $16,%r14d addl %r13d,%esi xorl %esi,%r11d roll $12,%r11d addl %r14d,%edi xorl %edi,%r8d roll $12,%r8d addl %r11d,%ecx xorl %ecx,%r13d roll $8,%r13d addl %r8d,%edx xorl %edx,%r14d roll $8,%r14d addl %r13d,%esi xorl %esi,%r11d roll $7,%r11d addl %r14d,%edi xorl %edi,%r8d roll $7,%r8d decl %ebp jnz L$oop movl %edi,36(%rsp) movl 
%esi,32(%rsp) movq 64(%rsp),%rbp movdqa %xmm2,%xmm1 movq 64+8(%rsp),%rsi paddd %xmm4,%xmm3 movq 64+16(%rsp),%rdi addl $0x61707865,%eax addl $0x3320646e,%ebx addl $0x79622d32,%ecx addl $0x6b206574,%edx addl 16(%rsp),%r8d addl 20(%rsp),%r9d addl 24(%rsp),%r10d addl 28(%rsp),%r11d addl 48(%rsp),%r12d addl 52(%rsp),%r13d addl 56(%rsp),%r14d addl 60(%rsp),%r15d paddd 32(%rsp),%xmm1 cmpq $64,%rbp jb L$tail xorl 0(%rsi),%eax xorl 4(%rsi),%ebx xorl 8(%rsi),%ecx xorl 12(%rsi),%edx xorl 16(%rsi),%r8d xorl 20(%rsi),%r9d xorl 24(%rsi),%r10d xorl 28(%rsi),%r11d movdqu 32(%rsi),%xmm0 xorl 48(%rsi),%r12d xorl 52(%rsi),%r13d xorl 56(%rsi),%r14d xorl 60(%rsi),%r15d leaq 64(%rsi),%rsi pxor %xmm1,%xmm0 movdqa %xmm2,32(%rsp) movd %xmm3,48(%rsp) movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) movdqu %xmm0,32(%rdi) movl %r12d,48(%rdi) movl %r13d,52(%rdi) movl %r14d,56(%rdi) movl %r15d,60(%rdi) leaq 64(%rdi),%rdi subq $64,%rbp jnz L$oop_outer jmp L$done .p2align 4 L$tail: movl %eax,0(%rsp) movl %ebx,4(%rsp) xorq %rbx,%rbx movl %ecx,8(%rsp) movl %edx,12(%rsp) movl %r8d,16(%rsp) movl %r9d,20(%rsp) movl %r10d,24(%rsp) movl %r11d,28(%rsp) movdqa %xmm1,32(%rsp) movl %r12d,48(%rsp) movl %r13d,52(%rsp) movl %r14d,56(%rsp) movl %r15d,60(%rsp) L$oop_tail: movzbl (%rsi,%rbx,1),%eax movzbl (%rsp,%rbx,1),%edx leaq 1(%rbx),%rbx xorl %edx,%eax movb %al,-1(%rdi,%rbx,1) decq %rbp jnz L$oop_tail L$done: leaq 64+24+48(%rsp),%rsi movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$no_data: .byte 0xf3,0xc3 .globl _ChaCha20_ctr32_ssse3 .private_extern _ChaCha20_ctr32_ssse3 .p2align 5 _ChaCha20_ctr32_ssse3: _CET_ENDBR movq %rsp,%r9 subq $64+8,%rsp movdqa L$sigma(%rip),%xmm0 movdqu (%rcx),%xmm1 movdqu 16(%rcx),%xmm2 movdqu (%r8),%xmm3 movdqa L$rot16(%rip),%xmm6 movdqa L$rot24(%rip),%xmm7 movdqa %xmm0,0(%rsp) movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) movq $10,%r8 jmp L$oop_ssse3 .p2align 5 L$oop_outer_ssse3: movdqa L$one(%rip),%xmm3 movdqa 0(%rsp),%xmm0 movdqa 16(%rsp),%xmm1 movdqa 32(%rsp),%xmm2 paddd 48(%rsp),%xmm3 movq $10,%r8 movdqa %xmm3,48(%rsp) jmp L$oop_ssse3 .p2align 5 L$oop_ssse3: paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,222 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $20,%xmm1 pslld $12,%xmm4 por %xmm4,%xmm1 paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,223 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $25,%xmm1 pslld $7,%xmm4 por %xmm4,%xmm1 pshufd $78,%xmm2,%xmm2 pshufd $57,%xmm1,%xmm1 pshufd $147,%xmm3,%xmm3 nop paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,222 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $20,%xmm1 pslld $12,%xmm4 por %xmm4,%xmm1 paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,223 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $25,%xmm1 pslld $7,%xmm4 por %xmm4,%xmm1 pshufd $78,%xmm2,%xmm2 pshufd $147,%xmm1,%xmm1 pshufd $57,%xmm3,%xmm3 decq %r8 jnz L$oop_ssse3 paddd 0(%rsp),%xmm0 paddd 16(%rsp),%xmm1 paddd 32(%rsp),%xmm2 paddd 48(%rsp),%xmm3 cmpq $64,%rdx jb L$tail_ssse3 movdqu 0(%rsi),%xmm4 movdqu 16(%rsi),%xmm5 pxor %xmm4,%xmm0 movdqu 32(%rsi),%xmm4 pxor %xmm5,%xmm1 movdqu 48(%rsi),%xmm5 leaq 64(%rsi),%rsi pxor %xmm4,%xmm2 pxor %xmm5,%xmm3 movdqu %xmm0,0(%rdi) movdqu %xmm1,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 64(%rdi),%rdi subq $64,%rdx jnz L$oop_outer_ssse3 jmp L$done_ssse3 .p2align 4 
L$tail_ssse3: movdqa %xmm0,0(%rsp) movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) xorq %r8,%r8 L$oop_tail_ssse3: movzbl (%rsi,%r8,1),%eax movzbl (%rsp,%r8,1),%ecx leaq 1(%r8),%r8 xorl %ecx,%eax movb %al,-1(%rdi,%r8,1) decq %rdx jnz L$oop_tail_ssse3 L$done_ssse3: leaq (%r9),%rsp L$ssse3_epilogue: .byte 0xf3,0xc3 .globl _ChaCha20_ctr32_ssse3_4x .private_extern _ChaCha20_ctr32_ssse3_4x .p2align 5 _ChaCha20_ctr32_ssse3_4x: _CET_ENDBR movq %rsp,%r9 movq %r10,%r11 subq $0x140+8,%rsp movdqa L$sigma(%rip),%xmm11 movdqu (%rcx),%xmm15 movdqu 16(%rcx),%xmm7 movdqu (%r8),%xmm3 leaq 256(%rsp),%rcx leaq L$rot16(%rip),%r10 leaq L$rot24(%rip),%r11 pshufd $0x00,%xmm11,%xmm8 pshufd $0x55,%xmm11,%xmm9 movdqa %xmm8,64(%rsp) pshufd $0xaa,%xmm11,%xmm10 movdqa %xmm9,80(%rsp) pshufd $0xff,%xmm11,%xmm11 movdqa %xmm10,96(%rsp) movdqa %xmm11,112(%rsp) pshufd $0x00,%xmm15,%xmm12 pshufd $0x55,%xmm15,%xmm13 movdqa %xmm12,128-256(%rcx) pshufd $0xaa,%xmm15,%xmm14 movdqa %xmm13,144-256(%rcx) pshufd $0xff,%xmm15,%xmm15 movdqa %xmm14,160-256(%rcx) movdqa %xmm15,176-256(%rcx) pshufd $0x00,%xmm7,%xmm4 pshufd $0x55,%xmm7,%xmm5 movdqa %xmm4,192-256(%rcx) pshufd $0xaa,%xmm7,%xmm6 movdqa %xmm5,208-256(%rcx) pshufd $0xff,%xmm7,%xmm7 movdqa %xmm6,224-256(%rcx) movdqa %xmm7,240-256(%rcx) pshufd $0x00,%xmm3,%xmm0 pshufd $0x55,%xmm3,%xmm1 paddd L$inc(%rip),%xmm0 pshufd $0xaa,%xmm3,%xmm2 movdqa %xmm1,272-256(%rcx) pshufd $0xff,%xmm3,%xmm3 movdqa %xmm2,288-256(%rcx) movdqa %xmm3,304-256(%rcx) jmp L$oop_enter4x .p2align 5 L$oop_outer4x: movdqa 64(%rsp),%xmm8 movdqa 80(%rsp),%xmm9 movdqa 96(%rsp),%xmm10 movdqa 112(%rsp),%xmm11 movdqa 128-256(%rcx),%xmm12 movdqa 144-256(%rcx),%xmm13 movdqa 160-256(%rcx),%xmm14 movdqa 176-256(%rcx),%xmm15 movdqa 192-256(%rcx),%xmm4 movdqa 208-256(%rcx),%xmm5 movdqa 224-256(%rcx),%xmm6 movdqa 240-256(%rcx),%xmm7 movdqa 256-256(%rcx),%xmm0 movdqa 272-256(%rcx),%xmm1 movdqa 288-256(%rcx),%xmm2 movdqa 304-256(%rcx),%xmm3 paddd L$four(%rip),%xmm0 L$oop_enter4x: movdqa %xmm6,32(%rsp) movdqa %xmm7,48(%rsp) movdqa (%r10),%xmm7 movl $10,%eax movdqa %xmm0,256-256(%rcx) jmp L$oop4x .p2align 5 L$oop4x: paddd %xmm12,%xmm8 paddd %xmm13,%xmm9 pxor %xmm8,%xmm0 pxor %xmm9,%xmm1 .byte 102,15,56,0,199 .byte 102,15,56,0,207 paddd %xmm0,%xmm4 paddd %xmm1,%xmm5 pxor %xmm4,%xmm12 pxor %xmm5,%xmm13 movdqa %xmm12,%xmm6 pslld $12,%xmm12 psrld $20,%xmm6 movdqa %xmm13,%xmm7 pslld $12,%xmm13 por %xmm6,%xmm12 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm13 paddd %xmm12,%xmm8 paddd %xmm13,%xmm9 pxor %xmm8,%xmm0 pxor %xmm9,%xmm1 .byte 102,15,56,0,198 .byte 102,15,56,0,206 paddd %xmm0,%xmm4 paddd %xmm1,%xmm5 pxor %xmm4,%xmm12 pxor %xmm5,%xmm13 movdqa %xmm12,%xmm7 pslld $7,%xmm12 psrld $25,%xmm7 movdqa %xmm13,%xmm6 pslld $7,%xmm13 por %xmm7,%xmm12 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm13 movdqa %xmm4,0(%rsp) movdqa %xmm5,16(%rsp) movdqa 32(%rsp),%xmm4 movdqa 48(%rsp),%xmm5 paddd %xmm14,%xmm10 paddd %xmm15,%xmm11 pxor %xmm10,%xmm2 pxor %xmm11,%xmm3 .byte 102,15,56,0,215 .byte 102,15,56,0,223 paddd %xmm2,%xmm4 paddd %xmm3,%xmm5 pxor %xmm4,%xmm14 pxor %xmm5,%xmm15 movdqa %xmm14,%xmm6 pslld $12,%xmm14 psrld $20,%xmm6 movdqa %xmm15,%xmm7 pslld $12,%xmm15 por %xmm6,%xmm14 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm15 paddd %xmm14,%xmm10 paddd %xmm15,%xmm11 pxor %xmm10,%xmm2 pxor %xmm11,%xmm3 .byte 102,15,56,0,214 .byte 102,15,56,0,222 paddd %xmm2,%xmm4 paddd %xmm3,%xmm5 pxor %xmm4,%xmm14 pxor %xmm5,%xmm15 movdqa %xmm14,%xmm7 pslld $7,%xmm14 psrld $25,%xmm7 movdqa %xmm15,%xmm6 pslld $7,%xmm15 por 
%xmm7,%xmm14 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm15 paddd %xmm13,%xmm8 paddd %xmm14,%xmm9 pxor %xmm8,%xmm3 pxor %xmm9,%xmm0 .byte 102,15,56,0,223 .byte 102,15,56,0,199 paddd %xmm3,%xmm4 paddd %xmm0,%xmm5 pxor %xmm4,%xmm13 pxor %xmm5,%xmm14 movdqa %xmm13,%xmm6 pslld $12,%xmm13 psrld $20,%xmm6 movdqa %xmm14,%xmm7 pslld $12,%xmm14 por %xmm6,%xmm13 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm14 paddd %xmm13,%xmm8 paddd %xmm14,%xmm9 pxor %xmm8,%xmm3 pxor %xmm9,%xmm0 .byte 102,15,56,0,222 .byte 102,15,56,0,198 paddd %xmm3,%xmm4 paddd %xmm0,%xmm5 pxor %xmm4,%xmm13 pxor %xmm5,%xmm14 movdqa %xmm13,%xmm7 pslld $7,%xmm13 psrld $25,%xmm7 movdqa %xmm14,%xmm6 pslld $7,%xmm14 por %xmm7,%xmm13 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm14 movdqa %xmm4,32(%rsp) movdqa %xmm5,48(%rsp) movdqa 0(%rsp),%xmm4 movdqa 16(%rsp),%xmm5 paddd %xmm15,%xmm10 paddd %xmm12,%xmm11 pxor %xmm10,%xmm1 pxor %xmm11,%xmm2 .byte 102,15,56,0,207 .byte 102,15,56,0,215 paddd %xmm1,%xmm4 paddd %xmm2,%xmm5 pxor %xmm4,%xmm15 pxor %xmm5,%xmm12 movdqa %xmm15,%xmm6 pslld $12,%xmm15 psrld $20,%xmm6 movdqa %xmm12,%xmm7 pslld $12,%xmm12 por %xmm6,%xmm15 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm12 paddd %xmm15,%xmm10 paddd %xmm12,%xmm11 pxor %xmm10,%xmm1 pxor %xmm11,%xmm2 .byte 102,15,56,0,206 .byte 102,15,56,0,214 paddd %xmm1,%xmm4 paddd %xmm2,%xmm5 pxor %xmm4,%xmm15 pxor %xmm5,%xmm12 movdqa %xmm15,%xmm7 pslld $7,%xmm15 psrld $25,%xmm7 movdqa %xmm12,%xmm6 pslld $7,%xmm12 por %xmm7,%xmm15 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm12 decl %eax jnz L$oop4x paddd 64(%rsp),%xmm8 paddd 80(%rsp),%xmm9 paddd 96(%rsp),%xmm10 paddd 112(%rsp),%xmm11 movdqa %xmm8,%xmm6 punpckldq %xmm9,%xmm8 movdqa %xmm10,%xmm7 punpckldq %xmm11,%xmm10 punpckhdq %xmm9,%xmm6 punpckhdq %xmm11,%xmm7 movdqa %xmm8,%xmm9 punpcklqdq %xmm10,%xmm8 movdqa %xmm6,%xmm11 punpcklqdq %xmm7,%xmm6 punpckhqdq %xmm10,%xmm9 punpckhqdq %xmm7,%xmm11 paddd 128-256(%rcx),%xmm12 paddd 144-256(%rcx),%xmm13 paddd 160-256(%rcx),%xmm14 paddd 176-256(%rcx),%xmm15 movdqa %xmm8,0(%rsp) movdqa %xmm9,16(%rsp) movdqa 32(%rsp),%xmm8 movdqa 48(%rsp),%xmm9 movdqa %xmm12,%xmm10 punpckldq %xmm13,%xmm12 movdqa %xmm14,%xmm7 punpckldq %xmm15,%xmm14 punpckhdq %xmm13,%xmm10 punpckhdq %xmm15,%xmm7 movdqa %xmm12,%xmm13 punpcklqdq %xmm14,%xmm12 movdqa %xmm10,%xmm15 punpcklqdq %xmm7,%xmm10 punpckhqdq %xmm14,%xmm13 punpckhqdq %xmm7,%xmm15 paddd 192-256(%rcx),%xmm4 paddd 208-256(%rcx),%xmm5 paddd 224-256(%rcx),%xmm8 paddd 240-256(%rcx),%xmm9 movdqa %xmm6,32(%rsp) movdqa %xmm11,48(%rsp) movdqa %xmm4,%xmm14 punpckldq %xmm5,%xmm4 movdqa %xmm8,%xmm7 punpckldq %xmm9,%xmm8 punpckhdq %xmm5,%xmm14 punpckhdq %xmm9,%xmm7 movdqa %xmm4,%xmm5 punpcklqdq %xmm8,%xmm4 movdqa %xmm14,%xmm9 punpcklqdq %xmm7,%xmm14 punpckhqdq %xmm8,%xmm5 punpckhqdq %xmm7,%xmm9 paddd 256-256(%rcx),%xmm0 paddd 272-256(%rcx),%xmm1 paddd 288-256(%rcx),%xmm2 paddd 304-256(%rcx),%xmm3 movdqa %xmm0,%xmm8 punpckldq %xmm1,%xmm0 movdqa %xmm2,%xmm7 punpckldq %xmm3,%xmm2 punpckhdq %xmm1,%xmm8 punpckhdq %xmm3,%xmm7 movdqa %xmm0,%xmm1 punpcklqdq %xmm2,%xmm0 movdqa %xmm8,%xmm3 punpcklqdq %xmm7,%xmm8 punpckhqdq %xmm2,%xmm1 punpckhqdq %xmm7,%xmm3 cmpq $256,%rdx jb L$tail4x movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 
16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu 0(%rsi),%xmm6 movdqu %xmm11,80(%rdi) movdqu 16(%rsi),%xmm11 movdqu %xmm2,96(%rdi) movdqu 32(%rsi),%xmm2 movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi movdqu 48(%rsi),%xmm7 pxor 32(%rsp),%xmm6 pxor %xmm10,%xmm11 pxor %xmm14,%xmm2 pxor %xmm8,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 48(%rsp),%xmm6 pxor %xmm15,%xmm11 pxor %xmm9,%xmm2 pxor %xmm3,%xmm7 movdqu %xmm6,64(%rdi) movdqu %xmm11,80(%rdi) movdqu %xmm2,96(%rdi) movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi subq $256,%rdx jnz L$oop_outer4x jmp L$done4x L$tail4x: cmpq $192,%rdx jae L$192_or_more4x cmpq $128,%rdx jae L$128_or_more4x cmpq $64,%rdx jae L$64_or_more4x xorq %r10,%r10 movdqa %xmm12,16(%rsp) movdqa %xmm4,32(%rsp) movdqa %xmm0,48(%rsp) jmp L$oop_tail4x .p2align 5 L$64_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu %xmm11,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm7,48(%rdi) je L$done4x movdqa 16(%rsp),%xmm6 leaq 64(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm13,16(%rsp) leaq 64(%rdi),%rdi movdqa %xmm5,32(%rsp) subq $64,%rdx movdqa %xmm1,48(%rsp) jmp L$oop_tail4x .p2align 5 L$128_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 pxor 16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu %xmm11,80(%rdi) movdqu %xmm2,96(%rdi) movdqu %xmm7,112(%rdi) je L$done4x movdqa 32(%rsp),%xmm6 leaq 128(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm10,16(%rsp) leaq 128(%rdi),%rdi movdqa %xmm14,32(%rsp) subq $128,%rdx movdqa %xmm8,48(%rsp) jmp L$oop_tail4x .p2align 5 L$192_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu 0(%rsi),%xmm6 movdqu %xmm11,80(%rdi) movdqu 16(%rsi),%xmm11 movdqu %xmm2,96(%rdi) movdqu 32(%rsi),%xmm2 movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi movdqu 48(%rsi),%xmm7 pxor 32(%rsp),%xmm6 pxor %xmm10,%xmm11 pxor %xmm14,%xmm2 pxor %xmm8,%xmm7 movdqu %xmm6,0(%rdi) movdqu %xmm11,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm7,48(%rdi) je L$done4x movdqa 48(%rsp),%xmm6 leaq 64(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm15,16(%rsp) leaq 64(%rdi),%rdi movdqa %xmm9,32(%rsp) subq $192,%rdx movdqa %xmm3,48(%rsp) L$oop_tail4x: movzbl (%rsi,%r10,1),%eax movzbl (%rsp,%r10,1),%ecx leaq 1(%r10),%r10 xorl %ecx,%eax movb %al,-1(%rdi,%r10,1) decq %rdx jnz L$oop_tail4x L$done4x: leaq (%r9),%rsp L$4x_epilogue: .byte 0xf3,0xc3 .globl _ChaCha20_ctr32_avx2 .private_extern _ChaCha20_ctr32_avx2 .p2align 5 _ChaCha20_ctr32_avx2: _CET_ENDBR movq %rsp,%r9 subq 
$0x280+8,%rsp andq $-32,%rsp vzeroupper vbroadcasti128 L$sigma(%rip),%ymm11 vbroadcasti128 (%rcx),%ymm3 vbroadcasti128 16(%rcx),%ymm15 vbroadcasti128 (%r8),%ymm7 leaq 256(%rsp),%rcx leaq 512(%rsp),%rax leaq L$rot16(%rip),%r10 leaq L$rot24(%rip),%r11 vpshufd $0x00,%ymm11,%ymm8 vpshufd $0x55,%ymm11,%ymm9 vmovdqa %ymm8,128-256(%rcx) vpshufd $0xaa,%ymm11,%ymm10 vmovdqa %ymm9,160-256(%rcx) vpshufd $0xff,%ymm11,%ymm11 vmovdqa %ymm10,192-256(%rcx) vmovdqa %ymm11,224-256(%rcx) vpshufd $0x00,%ymm3,%ymm0 vpshufd $0x55,%ymm3,%ymm1 vmovdqa %ymm0,256-256(%rcx) vpshufd $0xaa,%ymm3,%ymm2 vmovdqa %ymm1,288-256(%rcx) vpshufd $0xff,%ymm3,%ymm3 vmovdqa %ymm2,320-256(%rcx) vmovdqa %ymm3,352-256(%rcx) vpshufd $0x00,%ymm15,%ymm12 vpshufd $0x55,%ymm15,%ymm13 vmovdqa %ymm12,384-512(%rax) vpshufd $0xaa,%ymm15,%ymm14 vmovdqa %ymm13,416-512(%rax) vpshufd $0xff,%ymm15,%ymm15 vmovdqa %ymm14,448-512(%rax) vmovdqa %ymm15,480-512(%rax) vpshufd $0x00,%ymm7,%ymm4 vpshufd $0x55,%ymm7,%ymm5 vpaddd L$incy(%rip),%ymm4,%ymm4 vpshufd $0xaa,%ymm7,%ymm6 vmovdqa %ymm5,544-512(%rax) vpshufd $0xff,%ymm7,%ymm7 vmovdqa %ymm6,576-512(%rax) vmovdqa %ymm7,608-512(%rax) jmp L$oop_enter8x .p2align 5 L$oop_outer8x: vmovdqa 128-256(%rcx),%ymm8 vmovdqa 160-256(%rcx),%ymm9 vmovdqa 192-256(%rcx),%ymm10 vmovdqa 224-256(%rcx),%ymm11 vmovdqa 256-256(%rcx),%ymm0 vmovdqa 288-256(%rcx),%ymm1 vmovdqa 320-256(%rcx),%ymm2 vmovdqa 352-256(%rcx),%ymm3 vmovdqa 384-512(%rax),%ymm12 vmovdqa 416-512(%rax),%ymm13 vmovdqa 448-512(%rax),%ymm14 vmovdqa 480-512(%rax),%ymm15 vmovdqa 512-512(%rax),%ymm4 vmovdqa 544-512(%rax),%ymm5 vmovdqa 576-512(%rax),%ymm6 vmovdqa 608-512(%rax),%ymm7 vpaddd L$eight(%rip),%ymm4,%ymm4 L$oop_enter8x: vmovdqa %ymm14,64(%rsp) vmovdqa %ymm15,96(%rsp) vbroadcasti128 (%r10),%ymm15 vmovdqa %ymm4,512-512(%rax) movl $10,%eax jmp L$oop8x .p2align 5 L$oop8x: vpaddd %ymm0,%ymm8,%ymm8 vpxor %ymm4,%ymm8,%ymm4 vpshufb %ymm15,%ymm4,%ymm4 vpaddd %ymm1,%ymm9,%ymm9 vpxor %ymm5,%ymm9,%ymm5 vpshufb %ymm15,%ymm5,%ymm5 vpaddd %ymm4,%ymm12,%ymm12 vpxor %ymm0,%ymm12,%ymm0 vpslld $12,%ymm0,%ymm14 vpsrld $20,%ymm0,%ymm0 vpor %ymm0,%ymm14,%ymm0 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm5,%ymm13,%ymm13 vpxor %ymm1,%ymm13,%ymm1 vpslld $12,%ymm1,%ymm15 vpsrld $20,%ymm1,%ymm1 vpor %ymm1,%ymm15,%ymm1 vpaddd %ymm0,%ymm8,%ymm8 vpxor %ymm4,%ymm8,%ymm4 vpshufb %ymm14,%ymm4,%ymm4 vpaddd %ymm1,%ymm9,%ymm9 vpxor %ymm5,%ymm9,%ymm5 vpshufb %ymm14,%ymm5,%ymm5 vpaddd %ymm4,%ymm12,%ymm12 vpxor %ymm0,%ymm12,%ymm0 vpslld $7,%ymm0,%ymm15 vpsrld $25,%ymm0,%ymm0 vpor %ymm0,%ymm15,%ymm0 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm5,%ymm13,%ymm13 vpxor %ymm1,%ymm13,%ymm1 vpslld $7,%ymm1,%ymm14 vpsrld $25,%ymm1,%ymm1 vpor %ymm1,%ymm14,%ymm1 vmovdqa %ymm12,0(%rsp) vmovdqa %ymm13,32(%rsp) vmovdqa 64(%rsp),%ymm12 vmovdqa 96(%rsp),%ymm13 vpaddd %ymm2,%ymm10,%ymm10 vpxor %ymm6,%ymm10,%ymm6 vpshufb %ymm15,%ymm6,%ymm6 vpaddd %ymm3,%ymm11,%ymm11 vpxor %ymm7,%ymm11,%ymm7 vpshufb %ymm15,%ymm7,%ymm7 vpaddd %ymm6,%ymm12,%ymm12 vpxor %ymm2,%ymm12,%ymm2 vpslld $12,%ymm2,%ymm14 vpsrld $20,%ymm2,%ymm2 vpor %ymm2,%ymm14,%ymm2 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm7,%ymm13,%ymm13 vpxor %ymm3,%ymm13,%ymm3 vpslld $12,%ymm3,%ymm15 vpsrld $20,%ymm3,%ymm3 vpor %ymm3,%ymm15,%ymm3 vpaddd %ymm2,%ymm10,%ymm10 vpxor %ymm6,%ymm10,%ymm6 vpshufb %ymm14,%ymm6,%ymm6 vpaddd %ymm3,%ymm11,%ymm11 vpxor %ymm7,%ymm11,%ymm7 vpshufb %ymm14,%ymm7,%ymm7 vpaddd %ymm6,%ymm12,%ymm12 vpxor %ymm2,%ymm12,%ymm2 vpslld $7,%ymm2,%ymm15 vpsrld $25,%ymm2,%ymm2 vpor %ymm2,%ymm15,%ymm2 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm7,%ymm13,%ymm13 
vpxor %ymm3,%ymm13,%ymm3 vpslld $7,%ymm3,%ymm14 vpsrld $25,%ymm3,%ymm3 vpor %ymm3,%ymm14,%ymm3 vpaddd %ymm1,%ymm8,%ymm8 vpxor %ymm7,%ymm8,%ymm7 vpshufb %ymm15,%ymm7,%ymm7 vpaddd %ymm2,%ymm9,%ymm9 vpxor %ymm4,%ymm9,%ymm4 vpshufb %ymm15,%ymm4,%ymm4 vpaddd %ymm7,%ymm12,%ymm12 vpxor %ymm1,%ymm12,%ymm1 vpslld $12,%ymm1,%ymm14 vpsrld $20,%ymm1,%ymm1 vpor %ymm1,%ymm14,%ymm1 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm4,%ymm13,%ymm13 vpxor %ymm2,%ymm13,%ymm2 vpslld $12,%ymm2,%ymm15 vpsrld $20,%ymm2,%ymm2 vpor %ymm2,%ymm15,%ymm2 vpaddd %ymm1,%ymm8,%ymm8 vpxor %ymm7,%ymm8,%ymm7 vpshufb %ymm14,%ymm7,%ymm7 vpaddd %ymm2,%ymm9,%ymm9 vpxor %ymm4,%ymm9,%ymm4 vpshufb %ymm14,%ymm4,%ymm4 vpaddd %ymm7,%ymm12,%ymm12 vpxor %ymm1,%ymm12,%ymm1 vpslld $7,%ymm1,%ymm15 vpsrld $25,%ymm1,%ymm1 vpor %ymm1,%ymm15,%ymm1 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm4,%ymm13,%ymm13 vpxor %ymm2,%ymm13,%ymm2 vpslld $7,%ymm2,%ymm14 vpsrld $25,%ymm2,%ymm2 vpor %ymm2,%ymm14,%ymm2 vmovdqa %ymm12,64(%rsp) vmovdqa %ymm13,96(%rsp) vmovdqa 0(%rsp),%ymm12 vmovdqa 32(%rsp),%ymm13 vpaddd %ymm3,%ymm10,%ymm10 vpxor %ymm5,%ymm10,%ymm5 vpshufb %ymm15,%ymm5,%ymm5 vpaddd %ymm0,%ymm11,%ymm11 vpxor %ymm6,%ymm11,%ymm6 vpshufb %ymm15,%ymm6,%ymm6 vpaddd %ymm5,%ymm12,%ymm12 vpxor %ymm3,%ymm12,%ymm3 vpslld $12,%ymm3,%ymm14 vpsrld $20,%ymm3,%ymm3 vpor %ymm3,%ymm14,%ymm3 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm6,%ymm13,%ymm13 vpxor %ymm0,%ymm13,%ymm0 vpslld $12,%ymm0,%ymm15 vpsrld $20,%ymm0,%ymm0 vpor %ymm0,%ymm15,%ymm0 vpaddd %ymm3,%ymm10,%ymm10 vpxor %ymm5,%ymm10,%ymm5 vpshufb %ymm14,%ymm5,%ymm5 vpaddd %ymm0,%ymm11,%ymm11 vpxor %ymm6,%ymm11,%ymm6 vpshufb %ymm14,%ymm6,%ymm6 vpaddd %ymm5,%ymm12,%ymm12 vpxor %ymm3,%ymm12,%ymm3 vpslld $7,%ymm3,%ymm15 vpsrld $25,%ymm3,%ymm3 vpor %ymm3,%ymm15,%ymm3 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm6,%ymm13,%ymm13 vpxor %ymm0,%ymm13,%ymm0 vpslld $7,%ymm0,%ymm14 vpsrld $25,%ymm0,%ymm0 vpor %ymm0,%ymm14,%ymm0 decl %eax jnz L$oop8x leaq 512(%rsp),%rax vpaddd 128-256(%rcx),%ymm8,%ymm8 vpaddd 160-256(%rcx),%ymm9,%ymm9 vpaddd 192-256(%rcx),%ymm10,%ymm10 vpaddd 224-256(%rcx),%ymm11,%ymm11 vpunpckldq %ymm9,%ymm8,%ymm14 vpunpckldq %ymm11,%ymm10,%ymm15 vpunpckhdq %ymm9,%ymm8,%ymm8 vpunpckhdq %ymm11,%ymm10,%ymm10 vpunpcklqdq %ymm15,%ymm14,%ymm9 vpunpckhqdq %ymm15,%ymm14,%ymm14 vpunpcklqdq %ymm10,%ymm8,%ymm11 vpunpckhqdq %ymm10,%ymm8,%ymm8 vpaddd 256-256(%rcx),%ymm0,%ymm0 vpaddd 288-256(%rcx),%ymm1,%ymm1 vpaddd 320-256(%rcx),%ymm2,%ymm2 vpaddd 352-256(%rcx),%ymm3,%ymm3 vpunpckldq %ymm1,%ymm0,%ymm10 vpunpckldq %ymm3,%ymm2,%ymm15 vpunpckhdq %ymm1,%ymm0,%ymm0 vpunpckhdq %ymm3,%ymm2,%ymm2 vpunpcklqdq %ymm15,%ymm10,%ymm1 vpunpckhqdq %ymm15,%ymm10,%ymm10 vpunpcklqdq %ymm2,%ymm0,%ymm3 vpunpckhqdq %ymm2,%ymm0,%ymm0 vperm2i128 $0x20,%ymm1,%ymm9,%ymm15 vperm2i128 $0x31,%ymm1,%ymm9,%ymm1 vperm2i128 $0x20,%ymm10,%ymm14,%ymm9 vperm2i128 $0x31,%ymm10,%ymm14,%ymm10 vperm2i128 $0x20,%ymm3,%ymm11,%ymm14 vperm2i128 $0x31,%ymm3,%ymm11,%ymm3 vperm2i128 $0x20,%ymm0,%ymm8,%ymm11 vperm2i128 $0x31,%ymm0,%ymm8,%ymm0 vmovdqa %ymm15,0(%rsp) vmovdqa %ymm9,32(%rsp) vmovdqa 64(%rsp),%ymm15 vmovdqa 96(%rsp),%ymm9 vpaddd 384-512(%rax),%ymm12,%ymm12 vpaddd 416-512(%rax),%ymm13,%ymm13 vpaddd 448-512(%rax),%ymm15,%ymm15 vpaddd 480-512(%rax),%ymm9,%ymm9 vpunpckldq %ymm13,%ymm12,%ymm2 vpunpckldq %ymm9,%ymm15,%ymm8 vpunpckhdq %ymm13,%ymm12,%ymm12 vpunpckhdq %ymm9,%ymm15,%ymm15 vpunpcklqdq %ymm8,%ymm2,%ymm13 vpunpckhqdq %ymm8,%ymm2,%ymm2 vpunpcklqdq %ymm15,%ymm12,%ymm9 vpunpckhqdq %ymm15,%ymm12,%ymm12 vpaddd 512-512(%rax),%ymm4,%ymm4 vpaddd 544-512(%rax),%ymm5,%ymm5 vpaddd 
576-512(%rax),%ymm6,%ymm6 vpaddd 608-512(%rax),%ymm7,%ymm7 vpunpckldq %ymm5,%ymm4,%ymm15 vpunpckldq %ymm7,%ymm6,%ymm8 vpunpckhdq %ymm5,%ymm4,%ymm4 vpunpckhdq %ymm7,%ymm6,%ymm6 vpunpcklqdq %ymm8,%ymm15,%ymm5 vpunpckhqdq %ymm8,%ymm15,%ymm15 vpunpcklqdq %ymm6,%ymm4,%ymm7 vpunpckhqdq %ymm6,%ymm4,%ymm4 vperm2i128 $0x20,%ymm5,%ymm13,%ymm8 vperm2i128 $0x31,%ymm5,%ymm13,%ymm5 vperm2i128 $0x20,%ymm15,%ymm2,%ymm13 vperm2i128 $0x31,%ymm15,%ymm2,%ymm15 vperm2i128 $0x20,%ymm7,%ymm9,%ymm2 vperm2i128 $0x31,%ymm7,%ymm9,%ymm7 vperm2i128 $0x20,%ymm4,%ymm12,%ymm9 vperm2i128 $0x31,%ymm4,%ymm12,%ymm4 vmovdqa 0(%rsp),%ymm6 vmovdqa 32(%rsp),%ymm12 cmpq $512,%rdx jb L$tail8x vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 leaq 128(%rsi),%rsi vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm12,%ymm12 vpxor 32(%rsi),%ymm13,%ymm13 vpxor 64(%rsi),%ymm10,%ymm10 vpxor 96(%rsi),%ymm15,%ymm15 leaq 128(%rsi),%rsi vmovdqu %ymm12,0(%rdi) vmovdqu %ymm13,32(%rdi) vmovdqu %ymm10,64(%rdi) vmovdqu %ymm15,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm14,%ymm14 vpxor 32(%rsi),%ymm2,%ymm2 vpxor 64(%rsi),%ymm3,%ymm3 vpxor 96(%rsi),%ymm7,%ymm7 leaq 128(%rsi),%rsi vmovdqu %ymm14,0(%rdi) vmovdqu %ymm2,32(%rdi) vmovdqu %ymm3,64(%rdi) vmovdqu %ymm7,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm11,%ymm11 vpxor 32(%rsi),%ymm9,%ymm9 vpxor 64(%rsi),%ymm0,%ymm0 vpxor 96(%rsi),%ymm4,%ymm4 leaq 128(%rsi),%rsi vmovdqu %ymm11,0(%rdi) vmovdqu %ymm9,32(%rdi) vmovdqu %ymm0,64(%rdi) vmovdqu %ymm4,96(%rdi) leaq 128(%rdi),%rdi subq $512,%rdx jnz L$oop_outer8x jmp L$done8x L$tail8x: cmpq $448,%rdx jae L$448_or_more8x cmpq $384,%rdx jae L$384_or_more8x cmpq $320,%rdx jae L$320_or_more8x cmpq $256,%rdx jae L$256_or_more8x cmpq $192,%rdx jae L$192_or_more8x cmpq $128,%rdx jae L$128_or_more8x cmpq $64,%rdx jae L$64_or_more8x xorq %r10,%r10 vmovdqa %ymm6,0(%rsp) vmovdqa %ymm8,32(%rsp) jmp L$oop_tail8x .p2align 5 L$64_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) je L$done8x leaq 64(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm1,0(%rsp) leaq 64(%rdi),%rdi subq $64,%rdx vmovdqa %ymm5,32(%rsp) jmp L$oop_tail8x .p2align 5 L$128_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) je L$done8x leaq 128(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm12,0(%rsp) leaq 128(%rdi),%rdi subq $128,%rdx vmovdqa %ymm13,32(%rsp) jmp L$oop_tail8x .p2align 5 L$192_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) je L$done8x leaq 192(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm10,0(%rsp) leaq 192(%rdi),%rdi subq $192,%rdx vmovdqa %ymm15,32(%rsp) jmp L$oop_tail8x .p2align 5 L$256_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu 
%ymm15,224(%rdi) je L$done8x leaq 256(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm14,0(%rsp) leaq 256(%rdi),%rdi subq $256,%rdx vmovdqa %ymm2,32(%rsp) jmp L$oop_tail8x .p2align 5 L$320_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) je L$done8x leaq 320(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm3,0(%rsp) leaq 320(%rdi),%rdi subq $320,%rdx vmovdqa %ymm7,32(%rsp) jmp L$oop_tail8x .p2align 5 L$384_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vpxor 320(%rsi),%ymm3,%ymm3 vpxor 352(%rsi),%ymm7,%ymm7 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) vmovdqu %ymm3,320(%rdi) vmovdqu %ymm7,352(%rdi) je L$done8x leaq 384(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm11,0(%rsp) leaq 384(%rdi),%rdi subq $384,%rdx vmovdqa %ymm9,32(%rsp) jmp L$oop_tail8x .p2align 5 L$448_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vpxor 320(%rsi),%ymm3,%ymm3 vpxor 352(%rsi),%ymm7,%ymm7 vpxor 384(%rsi),%ymm11,%ymm11 vpxor 416(%rsi),%ymm9,%ymm9 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) vmovdqu %ymm3,320(%rdi) vmovdqu %ymm7,352(%rdi) vmovdqu %ymm11,384(%rdi) vmovdqu %ymm9,416(%rdi) je L$done8x leaq 448(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm0,0(%rsp) leaq 448(%rdi),%rdi subq $448,%rdx vmovdqa %ymm4,32(%rsp) L$oop_tail8x: movzbl (%rsi,%r10,1),%eax movzbl (%rsp,%r10,1),%ecx leaq 1(%r10),%r10 xorl %ecx,%eax movb %al,-1(%rdi,%r10,1) decq %rdx jnz L$oop_tail8x L$done8x: vzeroall leaq (%r9),%rsp L$8x_epilogue: .byte 0xf3,0xc3 #endif
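// The 1-block SSSE3 loop, the 4-block SSSE3 path and the 8-block AVX2 path
// above all compute the same counter-mode keystream; they differ only in how
// many 64-byte blocks are produced per iteration and in the counter offsets
// added through the L$one / L$four / L$inc / L$eight / L$incy constants. A
// rough scalar sketch of what they compute (pseudocode; chacha20_block is an
// illustrative name for 10 double rounds over the 4x4 state followed by
// adding the input state back in):
//
//   while (len > 0) {
//       keystream = chacha20_block(sigma, key, counter++, nonce);
//       n = min(len, 64);
//       for (i = 0; i < n; i++) out[i] = in[i] ^ keystream[i];
//       in += n; out += n; len -= n;
//   }
//
// The L$tail* / L$oop_tail* paths handle a final partial block by spilling
// the keystream to the stack and XORing it into the output one byte at a
// time.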
marvin-hansen/iggy-streaming-system
190,132
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text chacha20_poly1305_constants: .section __DATA,__const .p2align 6 L$chacha20_consts: .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' L$rol8: .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 L$rol16: .byte 2,3,0,1, 6,7,4,5, 10,11,8,9, 14,15,12,13 .byte 2,3,0,1, 6,7,4,5, 10,11,8,9, 14,15,12,13 L$avx2_init: .long 0,0,0,0 L$sse_inc: .long 1,0,0,0 L$avx2_inc: .long 2,0,0,0,2,0,0,0 L$clamp: .quad 0x0FFFFFFC0FFFFFFF, 0x0FFFFFFC0FFFFFFC .quad 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF .p2align 4 L$and_masks: .byte 0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff .text .p2align 6 poly_hash_ad_internal: xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 cmpq $13,%r8 jne L$hash_ad_loop L$poly_fast_tls_ad: movq (%rcx),%r10 movq 5(%rcx),%r11 shrq $24,%r11 movq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .byte 0xf3,0xc3 L$hash_ad_loop: cmpq $16,%r8 jb L$hash_ad_tail addq 0+0(%rcx),%r10 adcq 8+0(%rcx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq 
%r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rcx),%rcx subq $16,%r8 jmp L$hash_ad_loop L$hash_ad_tail: cmpq $0,%r8 je L$hash_ad_done xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 addq %r8,%rcx L$hash_ad_tail_loop: shldq $8,%r13,%r14 shlq $8,%r13 movzbq -1(%rcx),%r15 xorq %r15,%r13 decq %rcx decq %r8 jne L$hash_ad_tail_loop addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$hash_ad_done: .byte 0xf3,0xc3 .globl _chacha20_poly1305_open .private_extern _chacha20_poly1305_open .p2align 6 _chacha20_poly1305_open: _CET_ENDBR pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 pushq %r9 subq $288 + 0 + 32,%rsp leaq 32(%rsp),%rbp andq $-32,%rbp movq %rdx,%rbx movq %r8,0+0+32(%rbp) movq %rbx,8+0+32(%rbp) movl _OPENSSL_ia32cap_P+8(%rip),%eax andl $288,%eax xorl $288,%eax jz chacha20_poly1305_open_avx2 cmpq $128,%rbx jbe L$open_sse_128 movdqa L$chacha20_consts(%rip),%xmm0 movdqu 0(%r9),%xmm4 movdqu 16(%r9),%xmm8 movdqu 32(%r9),%xmm12 movdqa %xmm12,%xmm7 movdqa %xmm4,0+48(%rbp) movdqa %xmm8,0+64(%rbp) movdqa %xmm12,0+96(%rbp) movq $10,%r10 L$open_sse_init_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %r10 jne L$open_sse_init_rounds paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 pand L$clamp(%rip),%xmm0 movdqa %xmm0,0+0(%rbp) movdqa %xmm4,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal L$open_sse_main_loop: cmpq $256,%rbx jb L$open_sse_tail movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd L$sse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) movq $4,%rcx movq %rsi,%r8 L$open_sse_main_loop_rounds: movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor 
%xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 leaq 16(%r8),%r8 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor 
%xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %rcx jge L$open_sse_main_loop_rounds addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 cmpq $-6,%rcx jg L$open_sse_main_loop_rounds paddd L$chacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa %xmm12,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm12 pxor %xmm3,%xmm12 movdqu %xmm12,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm12 pxor %xmm7,%xmm12 movdqu %xmm12,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm12 pxor %xmm11,%xmm12 movdqu %xmm12,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm12 pxor %xmm15,%xmm12 movdqu %xmm12,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movdqu 0 + 192(%rsi),%xmm3 movdqu 16 + 192(%rsi),%xmm7 movdqu 32 + 192(%rsi),%xmm11 movdqu 48 + 192(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor 0+80(%rbp),%xmm15 movdqu %xmm0,0 + 192(%rdi) movdqu %xmm4,16 + 192(%rdi) movdqu %xmm8,32 + 192(%rdi) movdqu %xmm15,48 + 192(%rdi) leaq 256(%rsi),%rsi leaq 256(%rdi),%rdi subq $256,%rbx jmp L$open_sse_main_loop L$open_sse_tail: testq %rbx,%rbx jz L$open_sse_finalize cmpq $192,%rbx ja L$open_sse_tail_256 cmpq $128,%rbx ja L$open_sse_tail_192 cmpq $64,%rbx ja L$open_sse_tail_128 movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa 0+96(%rbp),%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) xorq %r8,%r8 movq %rbx,%rcx cmpq $16,%rcx jb L$open_sse_tail_64_rounds L$open_sse_tail_64_rounds_and_x1hash: addq 
0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx L$open_sse_tail_64_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 cmpq $16,%rcx jae L$open_sse_tail_64_rounds_and_x1hash cmpq $160,%r8 jne L$open_sse_tail_64_rounds paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 jmp L$open_sse_tail_64_dec_loop L$open_sse_tail_128: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa 0+96(%rbp),%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movq %rbx,%rcx andq $-16,%rcx xorq %r8,%r8 L$open_sse_tail_128_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$open_sse_tail_128_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 
102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 cmpq %rcx,%r8 jb L$open_sse_tail_128_rounds_and_x1hash cmpq $160,%r8 jne L$open_sse_tail_128_rounds paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 0(%rdi) movdqu %xmm5,16 + 0(%rdi) movdqu %xmm9,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) subq $64,%rbx leaq 64(%rsi),%rsi leaq 64(%rdi),%rdi jmp L$open_sse_tail_64_dec_loop L$open_sse_tail_192: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa 0+96(%rbp),%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movq %rbx,%rcx movq $160,%r8 cmpq $160,%rcx cmovgq %r8,%rcx andq $-16,%rcx xorq %r8,%r8 L$open_sse_tail_192_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$open_sse_tail_192_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 
pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 cmpq %rcx,%r8 jb L$open_sse_tail_192_rounds_and_x1hash cmpq $160,%r8 jne L$open_sse_tail_192_rounds cmpq $176,%rbx jb L$open_sse_tail_192_finish addq 0+160(%rsi),%r10 adcq 8+160(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 cmpq $192,%rbx jb L$open_sse_tail_192_finish addq 0+176(%rsi),%r10 adcq 8+176(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$open_sse_tail_192_finish: paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 
movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu %xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) subq $128,%rbx leaq 128(%rsi),%rsi leaq 128(%rdi),%rdi jmp L$open_sse_tail_64_dec_loop L$open_sse_tail_256: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd L$sse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) xorq %r8,%r8 L$open_sse_tail_256_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movdqa %xmm11,0+80(%rbp) paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $12,%xmm11 psrld $20,%xmm4 pxor %xmm11,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $7,%xmm11 psrld $25,%xmm4 pxor %xmm11,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $12,%xmm11 psrld $20,%xmm5 pxor %xmm11,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $7,%xmm11 psrld $25,%xmm5 pxor %xmm11,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $12,%xmm11 psrld $20,%xmm6 pxor %xmm11,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $7,%xmm11 psrld $25,%xmm6 pxor %xmm11,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 movdqa 0+80(%rbp),%xmm11 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa %xmm9,0+80(%rbp) paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb L$rol16(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $12,%xmm9 psrld $20,%xmm7 pxor %xmm9,%xmm7 paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb L$rol8(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $7,%xmm9 psrld $25,%xmm7 pxor %xmm9,%xmm7 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 movdqa 0+80(%rbp),%xmm9 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx movdqa %xmm11,0+80(%rbp) paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 
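// The scalar movq/mulq/imulq/adcq blocks interleaved with the vector rounds
// in this loop are one Poly1305 accumulation step, spread out so the integer
// multiplies overlap with the SIMD work. A rough sketch of the arithmetic
// (pseudocode; h is the accumulator, r the clamped key half, m_i the current
// 16-byte block):
//
//   h = (h + m_i + 2^128) * r  mod  (2^130 - 5)
//
// r is the first half of the one-time key masked with L$clamp; the "+ 2^128"
// is the adcq $1 into the top limb when a full block is absorbed. The
// 64x64->128-bit mulq products accumulate into r13:r14:r15 (with r9 as the
// high limb), and the reduction that follows folds everything above 2^130
// back in using 5*c = 4*c + c, which is what the andq $-4 plus
// shrdq $2 / shrq $2 sequence computes before the final addq/adcq chain.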
movdqa %xmm4,%xmm11 pslld $12,%xmm11 psrld $20,%xmm4 pxor %xmm11,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $7,%xmm11 psrld $25,%xmm4 pxor %xmm11,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $12,%xmm11 psrld $20,%xmm5 pxor %xmm11,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $7,%xmm11 psrld $25,%xmm5 pxor %xmm11,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $12,%xmm11 psrld $20,%xmm6 pxor %xmm11,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $7,%xmm11 psrld $25,%xmm6 pxor %xmm11,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 movdqa 0+80(%rbp),%xmm11 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm9,0+80(%rbp) paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb L$rol16(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $12,%xmm9 psrld $20,%xmm7 pxor %xmm9,%xmm7 paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb L$rol8(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $7,%xmm9 psrld $25,%xmm7 pxor %xmm9,%xmm7 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 movdqa 0+80(%rbp),%xmm9 addq $16,%r8 cmpq $160,%r8 jb L$open_sse_tail_256_rounds_and_x1hash movq %rbx,%rcx andq $-16,%rcx L$open_sse_tail_256_hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq $16,%r8 cmpq %rcx,%r8 jb L$open_sse_tail_256_hash paddd L$chacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa %xmm12,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm12 pxor %xmm3,%xmm12 movdqu %xmm12,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm12 pxor %xmm7,%xmm12 movdqu %xmm12,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm12 pxor %xmm11,%xmm12 movdqu %xmm12,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm12 pxor %xmm15,%xmm12 movdqu %xmm12,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 
pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movdqa 0+80(%rbp),%xmm12 subq $192,%rbx leaq 192(%rsi),%rsi leaq 192(%rdi),%rdi L$open_sse_tail_64_dec_loop: cmpq $16,%rbx jb L$open_sse_tail_16_init subq $16,%rbx movdqu (%rsi),%xmm3 pxor %xmm3,%xmm0 movdqu %xmm0,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movdqa %xmm4,%xmm0 movdqa %xmm8,%xmm4 movdqa %xmm12,%xmm8 jmp L$open_sse_tail_64_dec_loop L$open_sse_tail_16_init: movdqa %xmm0,%xmm1 L$open_sse_tail_16: testq %rbx,%rbx jz L$open_sse_finalize pxor %xmm3,%xmm3 leaq -1(%rsi,%rbx,1),%rsi movq %rbx,%r8 L$open_sse_tail_16_compose: pslldq $1,%xmm3 pinsrb $0,(%rsi),%xmm3 subq $1,%rsi subq $1,%r8 jnz L$open_sse_tail_16_compose .byte 102,73,15,126,221 pextrq $1,%xmm3,%r14 pxor %xmm1,%xmm3 L$open_sse_tail_16_extract: pextrb $0,%xmm3,(%rdi) psrldq $1,%xmm3 addq $1,%rdi subq $1,%rbx jne L$open_sse_tail_16_extract addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$open_sse_finalize: addq 0+0+32(%rbp),%r10 adcq 8+0+32(%rbp),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movq %r10,%r13 movq %r11,%r14 movq %r12,%r15 subq $-5,%r10 sbbq $-1,%r11 sbbq $3,%r12 cmovcq %r13,%r10 cmovcq %r14,%r11 cmovcq %r15,%r12 addq 0+0+16(%rbp),%r10 adcq 8+0+16(%rbp),%r11 addq $288 + 0 + 32,%rsp popq %r9 movq %r10,(%r9) movq %r11,8(%r9) popq %r15 popq %r14 popq %r13 popq %r12 popq %rbx popq %rbp .byte 0xf3,0xc3 L$open_sse_128: movdqu L$chacha20_consts(%rip),%xmm0 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqu 0(%r9),%xmm4 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqu 16(%r9),%xmm8 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqu 32(%r9),%xmm12 movdqa %xmm12,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa %xmm13,%xmm15 movq $10,%r10 L$open_sse_128_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa 
%xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 decq %r10 jnz L$open_sse_128_rounds paddd L$chacha20_consts(%rip),%xmm0 paddd L$chacha20_consts(%rip),%xmm1 paddd L$chacha20_consts(%rip),%xmm2 paddd %xmm7,%xmm4 paddd %xmm7,%xmm5 paddd %xmm7,%xmm6 paddd %xmm11,%xmm9 paddd %xmm11,%xmm10 paddd %xmm15,%xmm13 paddd L$sse_inc(%rip),%xmm15 paddd %xmm15,%xmm14 pand L$clamp(%rip),%xmm0 movdqa %xmm0,0+0(%rbp) movdqa %xmm4,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal L$open_sse_128_xor_hash: cmpq $16,%rbx jb L$open_sse_tail_16 subq $16,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movdqu 0(%rsi),%xmm3 pxor %xmm3,%xmm1 movdqu %xmm1,0(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm5,%xmm1 movdqa %xmm9,%xmm5 movdqa %xmm13,%xmm9 movdqa %xmm2,%xmm13 movdqa %xmm6,%xmm2 movdqa %xmm10,%xmm6 movdqa 
%xmm14,%xmm10 jmp L$open_sse_128_xor_hash .globl _chacha20_poly1305_seal .private_extern _chacha20_poly1305_seal .p2align 6 _chacha20_poly1305_seal: _CET_ENDBR pushq %rbp pushq %rbx pushq %r12 pushq %r13 pushq %r14 pushq %r15 pushq %r9 subq $288 + 0 + 32,%rsp leaq 32(%rsp),%rbp andq $-32,%rbp movq 56(%r9),%rbx addq %rdx,%rbx movq %r8,0+0+32(%rbp) movq %rbx,8+0+32(%rbp) movq %rdx,%rbx movl _OPENSSL_ia32cap_P+8(%rip),%eax andl $288,%eax xorl $288,%eax jz chacha20_poly1305_seal_avx2 cmpq $128,%rbx jbe L$seal_sse_128 movdqa L$chacha20_consts(%rip),%xmm0 movdqu 0(%r9),%xmm4 movdqu 16(%r9),%xmm8 movdqu 32(%r9),%xmm12 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqa %xmm8,%xmm11 movdqa %xmm12,%xmm15 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,%xmm14 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,%xmm13 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm4,0+48(%rbp) movdqa %xmm8,0+64(%rbp) movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) movq $10,%r10 L$seal_sse_init_rounds: movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 
pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %r10 jnz L$seal_sse_init_rounds paddd L$chacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 pand L$clamp(%rip),%xmm3 movdqa %xmm3,0+0(%rbp) movdqa %xmm7,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu %xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) cmpq $192,%rbx ja L$seal_sse_main_init movq $128,%rcx subq $128,%rbx leaq 128(%rsi),%rsi jmp L$seal_sse_128_tail_hash L$seal_sse_main_init: movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor %xmm12,%xmm15 movdqu %xmm0,0 + 128(%rdi) movdqu %xmm4,16 + 128(%rdi) movdqu %xmm8,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movq $192,%rcx subq $192,%rbx leaq 192(%rsi),%rsi movq $2,%rcx movq $8,%r8 cmpq $64,%rbx jbe L$seal_sse_tail_64 cmpq $128,%rbx jbe L$seal_sse_tail_128 cmpq $192,%rbx jbe L$seal_sse_tail_192 L$seal_sse_main_loop: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd L$sse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa 
%xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) .p2align 5 L$seal_sse_main_rounds: movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa L$rol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa L$rol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 
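// L$seal_sse_main_rounds interleaves ChaCha20 round computation with the scalar
// Poly1305 multiply/reduce over ciphertext already written at (%rdi).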
pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 leaq 16(%rdi),%rdi decq %r8 jge L$seal_sse_main_rounds addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi decq %rcx jg L$seal_sse_main_rounds paddd L$chacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa %xmm14,0+80(%rbp) movdqa %xmm14,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm14 pxor %xmm3,%xmm14 movdqu %xmm14,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm14 pxor %xmm7,%xmm14 movdqu %xmm14,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm14 pxor %xmm11,%xmm14 movdqu %xmm14,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm14 pxor %xmm15,%xmm14 movdqu %xmm14,48 + 0(%rdi) movdqa 0+80(%rbp),%xmm14 movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) cmpq $256,%rbx ja L$seal_sse_main_loop_xor movq $192,%rcx subq $192,%rbx leaq 192(%rsi),%rsi jmp L$seal_sse_128_tail_hash L$seal_sse_main_loop_xor: movdqu 0 + 192(%rsi),%xmm3 movdqu 16 + 192(%rsi),%xmm7 movdqu 32 + 192(%rsi),%xmm11 movdqu 48 + 192(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor %xmm12,%xmm15 movdqu %xmm0,0 + 192(%rdi) movdqu %xmm4,16 + 192(%rdi) movdqu %xmm8,32 + 192(%rdi) movdqu %xmm15,48 + 192(%rdi) leaq 256(%rsi),%rsi subq 
$256,%rbx movq $6,%rcx movq $4,%r8 cmpq $192,%rbx jg L$seal_sse_main_loop movq %rbx,%rcx testq %rbx,%rbx je L$seal_sse_128_tail_hash movq $6,%rcx cmpq $128,%rbx ja L$seal_sse_tail_192 cmpq $64,%rbx ja L$seal_sse_tail_128 L$seal_sse_tail_64: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa 0+96(%rbp),%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) L$seal_sse_tail_64_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_sse_tail_64_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi decq %rcx jg L$seal_sse_tail_64_rounds_and_x2hash decq %r8 jge L$seal_sse_tail_64_rounds_and_x1hash paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 jmp L$seal_sse_128_tail_xor L$seal_sse_tail_128: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa 0+96(%rbp),%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) L$seal_sse_tail_128_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq 
%r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_sse_tail_128_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 leaq 16(%rdi),%rdi decq %rcx jg L$seal_sse_tail_128_rounds_and_x2hash decq %r8 jge L$seal_sse_tail_128_rounds_and_x1hash paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 0(%rdi) movdqu %xmm5,16 + 0(%rdi) movdqu %xmm9,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movq $64,%rcx subq $64,%rbx leaq 64(%rsi),%rsi jmp L$seal_sse_128_tail_hash L$seal_sse_tail_192: movdqa L$chacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa 0+96(%rbp),%xmm14 paddd L$sse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa 
%xmm13,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) L$seal_sse_tail_192_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_sse_tail_192_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb 
L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 leaq 16(%rdi),%rdi decq %rcx jg L$seal_sse_tail_192_rounds_and_x2hash decq %r8 jge L$seal_sse_tail_192_rounds_and_x1hash paddd L$chacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd L$chacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd L$chacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu %xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movq $128,%rcx subq $128,%rbx leaq 128(%rsi),%rsi L$seal_sse_128_tail_hash: cmpq $16,%rcx jb L$seal_sse_128_tail_xor addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx leaq 16(%rdi),%rdi jmp L$seal_sse_128_tail_hash L$seal_sse_128_tail_xor: cmpq $16,%rbx jb L$seal_sse_tail_16 subq $16,%rbx movdqu 0(%rsi),%xmm3 pxor %xmm3,%xmm0 movdqu %xmm0,0(%rdi) addq 0(%rdi),%r10 adcq 8(%rdi),%r11 adcq $1,%r12 leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm4,%xmm0 movdqa %xmm8,%xmm4 movdqa %xmm12,%xmm8 movdqa %xmm1,%xmm12 movdqa %xmm5,%xmm1 movdqa %xmm9,%xmm5 movdqa %xmm13,%xmm9 jmp L$seal_sse_128_tail_xor L$seal_sse_tail_16: testq %rbx,%rbx jz L$process_blocks_of_extra_in movq %rbx,%r8 movq %rbx,%rcx leaq -1(%rsi,%rbx,1),%rsi pxor %xmm15,%xmm15 L$seal_sse_tail_16_compose: pslldq $1,%xmm15 pinsrb $0,(%rsi),%xmm15 leaq -1(%rsi),%rsi decq %rcx jne L$seal_sse_tail_16_compose pxor %xmm0,%xmm15 movq %rbx,%rcx movdqu %xmm15,%xmm0 L$seal_sse_tail_16_extract: pextrb $0,%xmm0,(%rdi) psrldq $1,%xmm0 addq $1,%rdi subq $1,%rcx jnz L$seal_sse_tail_16_extract 
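// The final partial block was encrypted byte-by-byte above; the code below reloads the
// saved pointer from the stack and absorbs any remaining trailing input into the Poly1305
// state (L$process_blocks_of_extra_in / L$process_partial_block) before the length block
// is hashed at L$do_length_block.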
movq 288 + 0 + 32(%rsp),%r9 movq 56(%r9),%r14 movq 48(%r9),%r13 testq %r14,%r14 jz L$process_partial_block movq $16,%r15 subq %rbx,%r15 cmpq %r15,%r14 jge L$load_extra_in movq %r14,%r15 L$load_extra_in: leaq -1(%r13,%r15,1),%rsi addq %r15,%r13 subq %r15,%r14 movq %r13,48(%r9) movq %r14,56(%r9) addq %r15,%r8 pxor %xmm11,%xmm11 L$load_extra_load_loop: pslldq $1,%xmm11 pinsrb $0,(%rsi),%xmm11 leaq -1(%rsi),%rsi subq $1,%r15 jnz L$load_extra_load_loop movq %rbx,%r15 L$load_extra_shift_loop: pslldq $1,%xmm11 subq $1,%r15 jnz L$load_extra_shift_loop leaq L$and_masks(%rip),%r15 shlq $4,%rbx pand -16(%r15,%rbx,1),%xmm15 por %xmm11,%xmm15 .byte 102,77,15,126,253 pextrq $1,%xmm15,%r14 addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$process_blocks_of_extra_in: movq 288+32+0 (%rsp),%r9 movq 48(%r9),%rsi movq 56(%r9),%r8 movq %r8,%rcx shrq $4,%r8 L$process_extra_hash_loop: jz process_extra_in_trailer addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rsi),%rsi subq $1,%r8 jmp L$process_extra_hash_loop process_extra_in_trailer: andq $15,%rcx movq %rcx,%rbx jz L$do_length_block leaq -1(%rsi,%rcx,1),%rsi L$process_extra_in_trailer_load: pslldq $1,%xmm15 pinsrb $0,(%rsi),%xmm15 leaq -1(%rsi),%rsi subq $1,%rcx jnz L$process_extra_in_trailer_load L$process_partial_block: leaq L$and_masks(%rip),%r15 shlq $4,%rbx pand -16(%r15,%rbx,1),%xmm15 .byte 102,77,15,126,253 pextrq $1,%xmm15,%r14 addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$do_length_block: addq 0+0+32(%rbp),%r10 adcq 8+0+32(%rbp),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq 
%r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movq %r10,%r13 movq %r11,%r14 movq %r12,%r15 subq $-5,%r10 sbbq $-1,%r11 sbbq $3,%r12 cmovcq %r13,%r10 cmovcq %r14,%r11 cmovcq %r15,%r12 addq 0+0+16(%rbp),%r10 adcq 8+0+16(%rbp),%r11 addq $288 + 0 + 32,%rsp popq %r9 movq %r10,(%r9) movq %r11,8(%r9) popq %r15 popq %r14 popq %r13 popq %r12 popq %rbx popq %rbp .byte 0xf3,0xc3 L$seal_sse_128: movdqu L$chacha20_consts(%rip),%xmm0 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqu 0(%r9),%xmm4 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqu 16(%r9),%xmm8 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqu 32(%r9),%xmm14 movdqa %xmm14,%xmm12 paddd L$sse_inc(%rip),%xmm12 movdqa %xmm12,%xmm13 paddd L$sse_inc(%rip),%xmm13 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa %xmm12,%xmm15 movq $10,%r10 L$seal_sse_128_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb L$rol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb L$rol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb L$rol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 decq %r10 jnz L$seal_sse_128_rounds paddd L$chacha20_consts(%rip),%xmm0 paddd L$chacha20_consts(%rip),%xmm1 paddd 
L$chacha20_consts(%rip),%xmm2 paddd %xmm7,%xmm4 paddd %xmm7,%xmm5 paddd %xmm7,%xmm6 paddd %xmm11,%xmm8 paddd %xmm11,%xmm9 paddd %xmm15,%xmm12 paddd L$sse_inc(%rip),%xmm15 paddd %xmm15,%xmm13 pand L$clamp(%rip),%xmm2 movdqa %xmm2,0+0(%rbp) movdqa %xmm6,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal jmp L$seal_sse_128_tail_xor .p2align 6 chacha20_poly1305_open_avx2: vzeroupper vmovdqa L$chacha20_consts(%rip),%ymm0 vbroadcasti128 0(%r9),%ymm4 vbroadcasti128 16(%r9),%ymm8 vbroadcasti128 32(%r9),%ymm12 vpaddd L$avx2_init(%rip),%ymm12,%ymm12 cmpq $192,%rbx jbe L$open_avx2_192 cmpq $320,%rbx jbe L$open_avx2_320 vmovdqa %ymm4,0+64(%rbp) vmovdqa %ymm8,0+96(%rbp) vmovdqa %ymm12,0+160(%rbp) movq $10,%r10 L$open_avx2_init_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 decq %r10 jne L$open_avx2_init_rounds vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand L$clamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 movq %r8,%r8 call poly_hash_ad_internal xorq %rcx,%rcx L$open_avx2_init_hash: addq 0+0(%rsi,%rcx,1),%r10 adcq 8+0(%rsi,%rcx,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq $16,%rcx cmpq $64,%rcx jne L$open_avx2_init_hash vpxor 0(%rsi),%ymm0,%ymm0 vpxor 32(%rsi),%ymm4,%ymm4 vmovdqu %ymm0,0(%rdi) vmovdqu %ymm4,32(%rdi) leaq 64(%rsi),%rsi leaq 64(%rdi),%rdi subq $64,%rbx L$open_avx2_main_loop: cmpq $512,%rbx jb L$open_avx2_main_loop_done vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa 
%ymm12,0+160(%rbp) xorq %rcx,%rcx L$open_avx2_main_loop_rounds: addq 0+0(%rsi,%rcx,1),%r10 adcq 8+0(%rsi,%rcx,1),%r11 adcq $1,%r12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 addq %rax,%r15 adcq %rdx,%r9 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 addq 0+16(%rsi,%rcx,1),%r10 adcq 8+16(%rsi,%rcx,1),%r11 adcq $1,%r12 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 addq %rax,%r15 adcq %rdx,%r9 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd 
%ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq 0+32(%rsi,%rcx,1),%r10 adcq 8+32(%rsi,%rcx,1),%r11 adcq $1,%r12 leaq 48(%rcx),%rcx vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq %rax,%r15 adcq %rdx,%r9 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpalignr $4,%ymm12,%ymm12,%ymm12 cmpq $60*8,%rcx jne L$open_avx2_main_loop_rounds vpaddd L$chacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) addq 0+60*8(%rsi),%r10 adcq 8+60*8(%rsi),%r11 adcq $1,%r12 vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu 
%ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) addq 0+60*8+16(%rsi),%r10 adcq 8+60*8+16(%rsi),%r11 adcq $1,%r12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vperm2i128 $0x13,%ymm0,%ymm4,%ymm4 vperm2i128 $0x02,%ymm8,%ymm12,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm8 vpxor 0+384(%rsi),%ymm3,%ymm3 vpxor 32+384(%rsi),%ymm0,%ymm0 vpxor 64+384(%rsi),%ymm4,%ymm4 vpxor 96+384(%rsi),%ymm8,%ymm8 vmovdqu %ymm3,0+384(%rdi) vmovdqu %ymm0,32+384(%rdi) vmovdqu %ymm4,64+384(%rdi) vmovdqu %ymm8,96+384(%rdi) leaq 512(%rsi),%rsi leaq 512(%rdi),%rdi subq $512,%rbx jmp L$open_avx2_main_loop L$open_avx2_main_loop_done: testq %rbx,%rbx vzeroupper je L$open_sse_finalize cmpq $384,%rbx ja L$open_avx2_tail_512 cmpq $256,%rbx ja L$open_avx2_tail_384 cmpq $128,%rbx ja L$open_avx2_tail_256 vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) xorq %r8,%r8 movq %rbx,%rcx andq $-16,%rcx testq %rcx,%rcx je L$open_avx2_tail_128_rounds L$open_avx2_tail_128_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq 
$0,%r12 L$open_avx2_tail_128_rounds: addq $16,%r8 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 cmpq %rcx,%r8 jb L$open_avx2_tail_128_rounds_and_x1hash cmpq $160,%r8 jne L$open_avx2_tail_128_rounds vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 jmp L$open_avx2_tail_128_xor L$open_avx2_tail_256: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) movq %rbx,0+128(%rbp) movq %rbx,%rcx subq $128,%rcx shrq $4,%rcx movq $10,%r8 cmpq $10,%rcx cmovgq %r8,%rcx movq %rsi,%rbx xorq %r8,%r8 L$open_avx2_tail_256_rounds_and_x1hash: addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx L$open_avx2_tail_256_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr 
$4,%ymm5,%ymm5,%ymm5 incq %r8 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 cmpq %rcx,%r8 jb L$open_avx2_tail_256_rounds_and_x1hash cmpq $10,%r8 jne L$open_avx2_tail_256_rounds movq %rbx,%r8 subq %rsi,%rbx movq %rbx,%rcx movq 0+128(%rbp),%rbx L$open_avx2_tail_256_hash: addq $16,%rcx cmpq %rbx,%rcx jg L$open_avx2_tail_256_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 jmp L$open_avx2_tail_256_hash L$open_avx2_tail_256_done: vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm1,%ymm1 vpxor 64+0(%rsi),%ymm5,%ymm5 vpxor 96+0(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm1,32+0(%rdi) vmovdqu %ymm5,64+0(%rdi) vmovdqu %ymm9,96+0(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 128(%rsi),%rsi leaq 128(%rdi),%rdi subq $128,%rbx jmp L$open_avx2_tail_128_xor L$open_avx2_tail_384: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa L$avx2_inc(%rip),%ymm12 
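// AVX2 open tail (up to 384 bytes): three ChaCha20 states are set up here; their block
// counters are produced from L$avx2_inc by the vpaddd chain that follows.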
vpaddd 0+160(%rbp),%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq %rbx,0+128(%rbp) movq %rbx,%rcx subq $256,%rcx shrq $4,%rcx addq $6,%rcx movq $10,%r8 cmpq $10,%rcx cmovgq %r8,%rcx movq %rsi,%rbx xorq %r8,%r8 L$open_avx2_tail_384_rounds_and_x2hash: addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx L$open_avx2_tail_384_rounds_and_x1hash: vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx incq %r8 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr 
$4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 cmpq %rcx,%r8 jb L$open_avx2_tail_384_rounds_and_x2hash cmpq $10,%r8 jne L$open_avx2_tail_384_rounds_and_x1hash movq %rbx,%r8 subq %rsi,%rbx movq %rbx,%rcx movq 0+128(%rbp),%rbx L$open_avx2_384_tail_hash: addq $16,%rcx cmpq %rbx,%rcx jg L$open_avx2_384_tail_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 jmp L$open_avx2_384_tail_hash L$open_avx2_384_tail_done: vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm2,%ymm2 vpxor 64+0(%rsi),%ymm6,%ymm6 vpxor 96+0(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm2,32+0(%rdi) vmovdqu %ymm6,64+0(%rdi) vmovdqu %ymm10,96+0(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm1,%ymm1 vpxor 64+128(%rsi),%ymm5,%ymm5 vpxor 96+128(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm1,32+128(%rdi) vmovdqu %ymm5,64+128(%rdi) vmovdqu %ymm9,96+128(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 256(%rsi),%rsi leaq 256(%rdi),%rdi subq $256,%rbx jmp L$open_avx2_tail_128_xor L$open_avx2_tail_512: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa 
%ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) xorq %rcx,%rcx movq %rsi,%r8 L$open_avx2_tail_512_rounds_and_x2hash: addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 L$open_avx2_tail_512_rounds_and_x1hash: vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr 
$8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 addq 0+16(%r8),%r10 adcq 8+16(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%r8),%r8 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 incq %rcx cmpq $4,%rcx jl L$open_avx2_tail_512_rounds_and_x2hash cmpq $10,%rcx jne L$open_avx2_tail_512_rounds_and_x1hash movq %rbx,%rcx subq $384,%rcx andq $-16,%rcx L$open_avx2_tail_512_hash: testq %rcx,%rcx je L$open_avx2_tail_512_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 
movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 subq $16,%rcx jmp L$open_avx2_tail_512_hash L$open_avx2_tail_512_done: vpaddd L$chacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 384(%rsi),%rsi leaq 384(%rdi),%rdi subq $384,%rbx L$open_avx2_tail_128_xor: cmpq $32,%rbx jb L$open_avx2_tail_32_xor subq $32,%rbx vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 jmp L$open_avx2_tail_128_xor L$open_avx2_tail_32_xor: cmpq $16,%rbx vmovdqa %xmm0,%xmm1 jb L$open_avx2_exit subq $16,%rbx vpxor (%rsi),%xmm0,%xmm1 vmovdqu %xmm1,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi vperm2i128 $0x11,%ymm0,%ymm0,%ymm0 vmovdqa %xmm0,%xmm1 L$open_avx2_exit: vzeroupper jmp L$open_sse_tail_16 L$open_avx2_192: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd L$avx2_inc(%rip),%ymm12,%ymm13 vmovdqa %ymm12,%ymm11 vmovdqa %ymm13,%ymm15 movq $10,%r10 L$open_avx2_192_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor 
%ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 decq %r10 jne L$open_avx2_192_rounds vpaddd %ymm2,%ymm0,%ymm0 vpaddd %ymm2,%ymm1,%ymm1 vpaddd %ymm6,%ymm4,%ymm4 vpaddd %ymm6,%ymm5,%ymm5 vpaddd %ymm10,%ymm8,%ymm8 vpaddd %ymm10,%ymm9,%ymm9 vpaddd %ymm11,%ymm12,%ymm12 vpaddd %ymm15,%ymm13,%ymm13 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand L$clamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 L$open_avx2_short: movq %r8,%r8 call poly_hash_ad_internal L$open_avx2_short_hash_and_xor_loop: cmpq $32,%rbx jb L$open_avx2_short_tail_32 subq $32,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rsi),%r10 adcq 8+16(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 vmovdqa 
%ymm1,%ymm12 vmovdqa %ymm5,%ymm1 vmovdqa %ymm9,%ymm5 vmovdqa %ymm13,%ymm9 vmovdqa %ymm2,%ymm13 vmovdqa %ymm6,%ymm2 jmp L$open_avx2_short_hash_and_xor_loop L$open_avx2_short_tail_32: cmpq $16,%rbx vmovdqa %xmm0,%xmm1 jb L$open_avx2_short_tail_32_exit subq $16,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor (%rsi),%xmm0,%xmm3 vmovdqu %xmm3,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi vextracti128 $1,%ymm0,%xmm1 L$open_avx2_short_tail_32_exit: vzeroupper jmp L$open_sse_tail_16 L$open_avx2_320: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd L$avx2_inc(%rip),%ymm12,%ymm13 vpaddd L$avx2_inc(%rip),%ymm13,%ymm14 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq $10,%r10 L$open_avx2_320_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd 
%ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 decq %r10 jne L$open_avx2_320_rounds vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd %ymm7,%ymm4,%ymm4 vpaddd %ymm7,%ymm5,%ymm5 vpaddd %ymm7,%ymm6,%ymm6 vpaddd %ymm11,%ymm8,%ymm8 vpaddd %ymm11,%ymm9,%ymm9 vpaddd %ymm11,%ymm10,%ymm10 vpaddd 0+160(%rbp),%ymm12,%ymm12 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd 0+224(%rbp),%ymm14,%ymm14 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand L$clamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 vperm2i128 $0x02,%ymm2,%ymm6,%ymm9 vperm2i128 $0x02,%ymm10,%ymm14,%ymm13 vperm2i128 $0x13,%ymm2,%ymm6,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm6 jmp L$open_avx2_short .p2align 6 chacha20_poly1305_seal_avx2: vzeroupper vmovdqa L$chacha20_consts(%rip),%ymm0 vbroadcasti128 0(%r9),%ymm4 vbroadcasti128 16(%r9),%ymm8 vbroadcasti128 32(%r9),%ymm12 vpaddd L$avx2_init(%rip),%ymm12,%ymm12 cmpq $192,%rbx jbe L$seal_avx2_192 cmpq $320,%rbx jbe L$seal_avx2_320 vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm4,%ymm7 vmovdqa %ymm4,0+64(%rbp) vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vmovdqa %ymm8,%ymm11 vmovdqa %ymm8,0+96(%rbp) vmovdqa %ymm12,%ymm15 vpaddd L$avx2_inc(%rip),%ymm15,%ymm14 vpaddd L$avx2_inc(%rip),%ymm14,%ymm13 vpaddd L$avx2_inc(%rip),%ymm13,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm15,0+256(%rbp) movq $10,%r10 L$seal_avx2_init_rounds: vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd 
%ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 decq %r10 jnz L$seal_avx2_init_rounds vpaddd L$chacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 
0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vperm2i128 $0x02,%ymm3,%ymm7,%ymm15 vperm2i128 $0x13,%ymm3,%ymm7,%ymm3 vpand L$clamp(%rip),%ymm15,%ymm15 vmovdqa %ymm15,0+0(%rbp) movq %r8,%r8 call poly_hash_ad_internal vpxor 0(%rsi),%ymm3,%ymm3 vpxor 32(%rsi),%ymm11,%ymm11 vmovdqu %ymm3,0(%rdi) vmovdqu %ymm11,32(%rdi) vperm2i128 $0x02,%ymm2,%ymm6,%ymm15 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+64(%rsi),%ymm15,%ymm15 vpxor 32+64(%rsi),%ymm2,%ymm2 vpxor 64+64(%rsi),%ymm6,%ymm6 vpxor 96+64(%rsi),%ymm10,%ymm10 vmovdqu %ymm15,0+64(%rdi) vmovdqu %ymm2,32+64(%rdi) vmovdqu %ymm6,64+64(%rdi) vmovdqu %ymm10,96+64(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm15 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+192(%rsi),%ymm15,%ymm15 vpxor 32+192(%rsi),%ymm1,%ymm1 vpxor 64+192(%rsi),%ymm5,%ymm5 vpxor 96+192(%rsi),%ymm9,%ymm9 vmovdqu %ymm15,0+192(%rdi) vmovdqu %ymm1,32+192(%rdi) vmovdqu %ymm5,64+192(%rdi) vmovdqu %ymm9,96+192(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm15 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm15,%ymm8 leaq 320(%rsi),%rsi subq $320,%rbx movq $320,%rcx cmpq $128,%rbx jbe L$seal_avx2_short_hash_remainder vpxor 0(%rsi),%ymm0,%ymm0 vpxor 32(%rsi),%ymm4,%ymm4 vpxor 64(%rsi),%ymm8,%ymm8 vpxor 96(%rsi),%ymm12,%ymm12 vmovdqu %ymm0,320(%rdi) vmovdqu %ymm4,352(%rdi) vmovdqu %ymm8,384(%rdi) vmovdqu %ymm12,416(%rdi) leaq 128(%rsi),%rsi subq $128,%rbx movq $8,%rcx movq $2,%r8 cmpq $128,%rbx jbe L$seal_avx2_tail_128 cmpq $256,%rbx jbe L$seal_avx2_tail_256 cmpq $384,%rbx jbe L$seal_avx2_tail_384 cmpq $512,%rbx jbe L$seal_avx2_tail_512 vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 
vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr 
$4,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 subq $16,%rdi movq $9,%rcx jmp L$seal_avx2_main_loop_rounds_entry .p2align 5 L$seal_avx2_main_loop: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) movq $10,%rcx .p2align 5 L$seal_avx2_main_loop_rounds: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 addq %rax,%r15 adcq %rdx,%r9 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 L$seal_avx2_main_loop_rounds_entry: vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb 
%ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 addq %rax,%r15 adcq %rdx,%r9 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq 0+32(%rdi),%r10 adcq 8+32(%rdi),%r11 adcq $1,%r12 leaq 48(%rdi),%rdi vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq %rax,%r15 adcq %rdx,%r9 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld 
$25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpalignr $4,%ymm12,%ymm12,%ymm12 decq %rcx jne L$seal_avx2_main_loop_rounds vpaddd L$chacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 
vperm2i128 $0x13,%ymm0,%ymm4,%ymm4 vperm2i128 $0x02,%ymm8,%ymm12,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm8 vpxor 0+384(%rsi),%ymm3,%ymm3 vpxor 32+384(%rsi),%ymm0,%ymm0 vpxor 64+384(%rsi),%ymm4,%ymm4 vpxor 96+384(%rsi),%ymm8,%ymm8 vmovdqu %ymm3,0+384(%rdi) vmovdqu %ymm0,32+384(%rdi) vmovdqu %ymm4,64+384(%rdi) vmovdqu %ymm8,96+384(%rdi) leaq 512(%rsi),%rsi subq $512,%rbx cmpq $512,%rbx jg L$seal_avx2_main_loop addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi movq $10,%rcx xorq %r8,%r8 cmpq $384,%rbx ja L$seal_avx2_tail_512 cmpq $256,%rbx ja L$seal_avx2_tail_384 cmpq $128,%rbx ja L$seal_avx2_tail_256 L$seal_avx2_tail_128: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) L$seal_avx2_tail_128_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_avx2_tail_128_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 
vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg L$seal_avx2_tail_128_rounds_and_3xhash decq %r8 jge L$seal_avx2_tail_128_rounds_and_2xhash vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 jmp L$seal_avx2_short_loop L$seal_avx2_tail_256: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) L$seal_avx2_tail_256_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_avx2_tail_256_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq 
%rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg L$seal_avx2_tail_256_rounds_and_3xhash decq %r8 jge L$seal_avx2_tail_256_rounds_and_2xhash vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm1,%ymm1 vpxor 64+0(%rsi),%ymm5,%ymm5 vpxor 96+0(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm1,32+0(%rdi) vmovdqu %ymm5,64+0(%rdi) vmovdqu %ymm9,96+0(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $128,%rcx leaq 128(%rsi),%rsi subq $128,%rbx jmp L$seal_avx2_short_hash_remainder L$seal_avx2_tail_384: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa 
%ymm14,0+224(%rbp) L$seal_avx2_tail_384_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_avx2_tail_384_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 
adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 leaq 32(%rdi),%rdi decq %rcx jg L$seal_avx2_tail_384_rounds_and_3xhash decq %r8 jge L$seal_avx2_tail_384_rounds_and_2xhash vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm2,%ymm2 vpxor 64+0(%rsi),%ymm6,%ymm6 vpxor 96+0(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm2,32+0(%rdi) vmovdqu %ymm6,64+0(%rdi) vmovdqu %ymm10,96+0(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm1,%ymm1 vpxor 64+128(%rsi),%ymm5,%ymm5 vpxor 96+128(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm1,32+128(%rdi) vmovdqu %ymm5,64+128(%rdi) vmovdqu %ymm9,96+128(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $256,%rcx leaq 256(%rsi),%rsi subq $256,%rbx jmp L$seal_avx2_short_hash_remainder L$seal_avx2_tail_512: vmovdqa L$chacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa L$avx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) L$seal_avx2_tail_512_rounds_and_3xhash: addq 
0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi L$seal_avx2_tail_512_rounds_and_2xhash: vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 addq %rax,%r15 adcq %rdx,%r9 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa L$rol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 
0+128(%rbp),%ymm12,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa L$rol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg L$seal_avx2_tail_512_rounds_and_3xhash decq %r8 jge L$seal_avx2_tail_512_rounds_and_2xhash vpaddd L$chacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 
$0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $384,%rcx leaq 384(%rsi),%rsi subq $384,%rbx jmp L$seal_avx2_short_hash_remainder L$seal_avx2_320: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd L$avx2_inc(%rip),%ymm12,%ymm13 vpaddd L$avx2_inc(%rip),%ymm13,%ymm14 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq $10,%r10 L$seal_avx2_320_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld 
$20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb L$rol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 decq %r10 jne L$seal_avx2_320_rounds vpaddd L$chacha20_consts(%rip),%ymm0,%ymm0 vpaddd L$chacha20_consts(%rip),%ymm1,%ymm1 vpaddd L$chacha20_consts(%rip),%ymm2,%ymm2 vpaddd %ymm7,%ymm4,%ymm4 vpaddd %ymm7,%ymm5,%ymm5 vpaddd %ymm7,%ymm6,%ymm6 vpaddd %ymm11,%ymm8,%ymm8 vpaddd %ymm11,%ymm9,%ymm9 vpaddd %ymm11,%ymm10,%ymm10 vpaddd 0+160(%rbp),%ymm12,%ymm12 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd 0+224(%rbp),%ymm14,%ymm14 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand L$clamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 vperm2i128 $0x02,%ymm2,%ymm6,%ymm9 vperm2i128 $0x02,%ymm10,%ymm14,%ymm13 vperm2i128 $0x13,%ymm2,%ymm6,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm6 jmp L$seal_avx2_short L$seal_avx2_192: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd L$avx2_inc(%rip),%ymm12,%ymm13 vmovdqa %ymm12,%ymm11 vmovdqa %ymm13,%ymm15 movq $10,%r10 L$seal_avx2_192_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb L$rol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd 
%ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb L$rol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 decq %r10 jne L$seal_avx2_192_rounds vpaddd %ymm2,%ymm0,%ymm0 vpaddd %ymm2,%ymm1,%ymm1 vpaddd %ymm6,%ymm4,%ymm4 vpaddd %ymm6,%ymm5,%ymm5 vpaddd %ymm10,%ymm8,%ymm8 vpaddd %ymm10,%ymm9,%ymm9 vpaddd %ymm11,%ymm12,%ymm12 vpaddd %ymm15,%ymm13,%ymm13 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand L$clamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 L$seal_avx2_short: movq %r8,%r8 call poly_hash_ad_internal xorq %rcx,%rcx L$seal_avx2_short_hash_remainder: cmpq $16,%rcx jb L$seal_avx2_short_loop addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx addq $16,%rdi jmp L$seal_avx2_short_hash_remainder L$seal_avx2_short_loop: cmpq $32,%rbx jb L$seal_avx2_short_tail subq $32,%rbx vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 vmovdqa %ymm1,%ymm12 vmovdqa %ymm5,%ymm1 vmovdqa %ymm9,%ymm5 vmovdqa %ymm13,%ymm9 vmovdqa %ymm2,%ymm13 vmovdqa %ymm6,%ymm2 jmp L$seal_avx2_short_loop L$seal_avx2_short_tail: cmpq $16,%rbx jb L$seal_avx2_exit subq $16,%rbx vpxor (%rsi),%xmm0,%xmm3 vmovdqu 
%xmm3,(%rdi) leaq 16(%rsi),%rsi addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi vextracti128 $1,%ymm0,%xmm0 L$seal_avx2_exit: vzeroupper jmp L$seal_sse_tail_16 #endif
marvin-hansen/iggy-streaming-system
89,384
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/cipher_extra/aesni-sha256-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _aesni_cbc_sha256_enc .private_extern _aesni_cbc_sha256_enc .p2align 4 _aesni_cbc_sha256_enc: leaq _OPENSSL_ia32cap_P(%rip),%r11 movl $1,%eax cmpq $0,%rdi je L$probe movl 0(%r11),%eax movq 4(%r11),%r10 btq $61,%r10 jc aesni_cbc_sha256_enc_shaext movq %r10,%r11 shrq $32,%r11 testl $2048,%r10d jnz aesni_cbc_sha256_enc_xop andl $296,%r11d cmpl $296,%r11d je aesni_cbc_sha256_enc_avx2 andl $268435456,%r10d jnz aesni_cbc_sha256_enc_avx ud2 xorl %eax,%eax cmpq $0,%rdi je L$probe ud2 L$probe: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 K256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1 .long 0,0,0,0, 0,0,0,0 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .p2align 6 .p2align 6 aesni_cbc_sha256_enc_xop: L$xop_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $128,%rsp andq $-64,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) L$prologue_xop: vzeroall movq %rdi,%r12 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r13 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 subq $9,%r14 movl 0(%r15),%eax movl 4(%r15),%ebx movl 8(%r15),%ecx movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqa 0(%r13,%r14,8),%xmm14 vmovdqa 16(%r13,%r14,8),%xmm13 
vmovdqa 32(%r13,%r14,8),%xmm12 vmovdqu 0-128(%rdi),%xmm10 jmp L$loop_xop .p2align 4 L$loop_xop: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi,%r12,1),%xmm0 vmovdqu 16(%rsi,%r12,1),%xmm1 vmovdqu 32(%rsi,%r12,1),%xmm2 vmovdqu 48(%rsi,%r12,1),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%esi vmovdqa %xmm6,32(%rsp) xorl %ecx,%esi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp L$xop_00_47 .p2align 4 L$xop_00_47: subq $-32*4,%rbp vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) vpalignr $4,%xmm0,%xmm1,%xmm4 rorl $14,%r13d movl %r14d,%eax vpalignr $4,%xmm2,%xmm3,%xmm7 movl %r9d,%r12d xorl %r8d,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %r10d,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %eax,%r14d vpaddd %xmm7,%xmm0,%xmm0 andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %r10d,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi .byte 143,232,120,194,251,13 xorl %eax,%r14d addl %r13d,%r11d vpxor %xmm6,%xmm4,%xmm4 xorl %ebx,%esi addl %r11d,%edx vpsrld $10,%xmm3,%xmm6 rorl $2,%r14d addl %esi,%r11d vpaddd %xmm4,%xmm0,%xmm0 movl %edx,%r13d addl %r11d,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%r11d vpxor %xmm6,%xmm7,%xmm7 movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d vpsrldq $8,%xmm7,%xmm7 addl 4(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d vpaddd %xmm7,%xmm0,%xmm0 xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d .byte 143,232,120,194,248,13 xorl %r11d,%r14d addl %r13d,%r10d vpsrld $10,%xmm0,%xmm6 xorl %eax,%r15d addl %r10d,%ecx .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%r10d vpxor %xmm6,%xmm7,%xmm7 movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d vpxor %xmm5,%xmm7,%xmm7 movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d vpaddd %xmm7,%xmm0,%xmm0 addl 8(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d vpaddd 0(%rbp),%xmm0,%xmm6 xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) vpalignr $4,%xmm1,%xmm2,%xmm4 rorl $14,%r13d movl %r14d,%r8d vpalignr $4,%xmm3,%xmm0,%xmm7 movl %ebx,%r12d xorl %eax,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %ecx,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %r8d,%r14d vpaddd %xmm7,%xmm1,%xmm1 andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d 
addl 16(%rsp),%edx movl %r8d,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %ecx,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi .byte 143,232,120,194,248,13 xorl %r8d,%r14d addl %r13d,%edx vpxor %xmm6,%xmm4,%xmm4 xorl %r9d,%esi addl %edx,%r11d vpsrld $10,%xmm0,%xmm6 rorl $2,%r14d addl %esi,%edx vpaddd %xmm4,%xmm1,%xmm1 movl %r11d,%r13d addl %edx,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%edx vpxor %xmm6,%xmm7,%xmm7 movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrldq $8,%xmm7,%xmm7 addl 20(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d vpaddd %xmm7,%xmm1,%xmm1 xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d .byte 143,232,120,194,249,13 xorl %edx,%r14d addl %r13d,%ecx vpsrld $10,%xmm1,%xmm6 xorl %r8d,%r15d addl %ecx,%r10d .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%ecx vpxor %xmm6,%xmm7,%xmm7 movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx vpxor %xmm5,%xmm7,%xmm7 movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d vpaddd %xmm7,%xmm1,%xmm1 addl 24(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d vpaddd 32(%rbp),%xmm1,%xmm6 xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 rorl $14,%r13d movl %r14d,%eax vpalignr $4,%xmm0,%xmm1,%xmm7 movl %r9d,%r12d xorl %r8d,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %r10d,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %eax,%r14d vpaddd %xmm7,%xmm2,%xmm2 andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %r10d,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi .byte 143,232,120,194,249,13 xorl %eax,%r14d addl %r13d,%r11d vpxor %xmm6,%xmm4,%xmm4 xorl %ebx,%esi addl %r11d,%edx vpsrld $10,%xmm1,%xmm6 rorl $2,%r14d addl %esi,%r11d vpaddd %xmm4,%xmm2,%xmm2 movl %edx,%r13d addl %r11d,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%r11d vpxor %xmm6,%xmm7,%xmm7 movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d vpsrldq $8,%xmm7,%xmm7 addl 36(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d vpaddd %xmm7,%xmm2,%xmm2 xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d .byte 143,232,120,194,250,13 xorl %r11d,%r14d addl %r13d,%r10d vpsrld $10,%xmm2,%xmm6 xorl %eax,%r15d addl %r10d,%ecx .byte 143,232,120,194,239,2 rorl $2,%r14d addl 
%r15d,%r10d vpxor %xmm6,%xmm7,%xmm7 movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d vpxor %xmm5,%xmm7,%xmm7 movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d vpaddd %xmm7,%xmm2,%xmm2 addl 40(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d vpaddd 64(%rbp),%xmm2,%xmm6 xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 rorl $14,%r13d movl %r14d,%r8d vpalignr $4,%xmm1,%xmm2,%xmm7 movl %ebx,%r12d xorl %eax,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %ecx,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %r8d,%r14d vpaddd %xmm7,%xmm3,%xmm3 andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %ecx,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi .byte 143,232,120,194,250,13 xorl %r8d,%r14d addl %r13d,%edx vpxor %xmm6,%xmm4,%xmm4 xorl %r9d,%esi addl %edx,%r11d vpsrld $10,%xmm2,%xmm6 rorl $2,%r14d addl %esi,%edx vpaddd %xmm4,%xmm3,%xmm3 movl %r11d,%r13d addl %edx,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%edx vpxor %xmm6,%xmm7,%xmm7 movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrldq $8,%xmm7,%xmm7 addl 52(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d vpaddd %xmm7,%xmm3,%xmm3 xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d .byte 143,232,120,194,251,13 xorl %edx,%r14d addl %r13d,%ecx vpsrld $10,%xmm3,%xmm6 xorl %r8d,%r15d addl %ecx,%r10d .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%ecx vpxor %xmm6,%xmm7,%xmm7 movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx vpxor %xmm5,%xmm7,%xmm7 movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d vpaddd %xmm7,%xmm3,%xmm3 addl 56(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d vpaddd 96(%rbp),%xmm3,%xmm6 xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl 
%r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) movq 64+0(%rsp),%r12 vpand %xmm14,%xmm11,%xmm11 movq 64+8(%rsp),%r15 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r12,1) leaq 16(%r12),%r12 cmpb $0,131(%rbp) jne L$xop_00_47 vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d rorl $11,%r14d xorl %r10d,%r12d xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx rorl $2,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx rorl $2,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r12d xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d rorl $2,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d rorl $2,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl 
%ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d rorl $11,%r14d xorl %r10d,%r12d xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx rorl $2,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx rorl $2,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r12d xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d rorl $2,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d rorl $2,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d rorl 
$9,%r14d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%r12 movq 64+8(%rsp),%r13 movq 64+40(%rsp),%r15 movq 64+48(%rsp),%rsi vpand %xmm14,%xmm11,%xmm11 movl %r14d,%eax vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r12),%r12 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d cmpq 64+16(%rsp),%r12 movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) jb L$loop_xop movq 64+32(%rsp),%r8 movq 120(%rsp),%rsi vmovdqu %xmm8,(%r8) vzeroall movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_xop: .byte 0xf3,0xc3 .p2align 6 aesni_cbc_sha256_enc_avx: L$avx_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $128,%rsp andq $-64,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) L$prologue_avx: vzeroall movq %rdi,%r12 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r13 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 subq $9,%r14 movl 0(%r15),%eax movl 4(%r15),%ebx movl 8(%r15),%ecx movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqa 0(%r13,%r14,8),%xmm14 vmovdqa 16(%r13,%r14,8),%xmm13 vmovdqa 32(%r13,%r14,8),%xmm12 vmovdqu 0-128(%rdi),%xmm10 jmp L$loop_avx .p2align 4 L$loop_avx: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi,%r12,1),%xmm0 vmovdqu 16(%rsi,%r12,1),%xmm1 vmovdqu 32(%rsi,%r12,1),%xmm2 vmovdqu 48(%rsi,%r12,1),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%esi vmovdqa %xmm6,32(%rsp) xorl %ecx,%esi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp L$avx_00_47 .p2align 4 L$avx_00_47: subq $-32*4,%rbp vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) vpalignr $4,%xmm0,%xmm1,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm2,%xmm3,%xmm7 xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm0,%xmm0 vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d vpsrld 
$3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi vpshufd $250,%xmm3,%xmm7 addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 4(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm0,%xmm0 addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpsrldq $8,%xmm6,%xmm6 andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d vpaddd %xmm6,%xmm0,%xmm0 movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d vpshufd $80,%xmm0,%xmm7 xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d vpsrlq $17,%xmm7,%xmm7 xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpslldq $8,%xmm6,%xmm6 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi vpaddd %xmm6,%xmm0,%xmm0 shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi vpaddd 0(%rbp),%xmm0,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) vpalignr $4,%xmm1,%xmm2,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm3,%xmm0,%xmm7 xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm1,%xmm1 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi vpshufd $250,%xmm0,%xmm7 addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 20(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 
xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm1,%xmm1 addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpsrldq $8,%xmm6,%xmm6 andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx vpaddd %xmm6,%xmm1,%xmm1 movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d vpshufd $80,%xmm1,%xmm7 xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx vpsrlq $17,%xmm7,%xmm7 xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpslldq $8,%xmm6,%xmm6 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi vpaddd %xmm6,%xmm1,%xmm1 shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi vpaddd 32(%rbp),%xmm1,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm0,%xmm1,%xmm7 xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm2,%xmm2 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi vpshufd $250,%xmm1,%xmm7 addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 36(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm2,%xmm2 addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpsrldq $8,%xmm6,%xmm6 andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d vpaddd %xmm6,%xmm2,%xmm2 movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d 
vpshufd $80,%xmm2,%xmm7 xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d vpsrlq $17,%xmm7,%xmm7 xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpslldq $8,%xmm6,%xmm6 vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi vpaddd %xmm6,%xmm2,%xmm2 shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi vpaddd 64(%rbp),%xmm2,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm1,%xmm2,%xmm7 xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm3,%xmm3 vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi vpshufd $250,%xmm2,%xmm7 addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 52(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm3,%xmm3 addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpsrldq $8,%xmm6,%xmm6 andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx vpaddd %xmm6,%xmm3,%xmm3 movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d vpshufd $80,%xmm3,%xmm7 xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx vpsrlq $17,%xmm7,%xmm7 xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpslldq $8,%xmm6,%xmm6 vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 
vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi vpaddd %xmm6,%xmm3,%xmm3 shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi vpaddd 96(%rbp),%xmm3,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) movq 64+0(%rsp),%r12 vpand %xmm14,%xmm11,%xmm11 movq 64+8(%rsp),%r15 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r12,1) leaq 16(%r12),%r12 cmpb $0,131(%rbp) jne L$avx_00_47 vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d 
shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%edx andl 
%r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%r12 movq 64+8(%rsp),%r13 movq 64+40(%rsp),%r15 movq 64+48(%rsp),%rsi vpand %xmm14,%xmm11,%xmm11 movl %r14d,%eax vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r12),%r12 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d cmpq 64+16(%rsp),%r12 movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) jb L$loop_avx movq 64+32(%rsp),%r8 movq 120(%rsp),%rsi vmovdqu %xmm8,(%r8) vzeroall movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_avx: .byte 0xf3,0xc3 .p2align 6 aesni_cbc_sha256_enc_avx2: L$avx2_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 subq $576,%rsp andq $-1024,%rsp addq $448,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) L$prologue_avx2: vzeroall movq %rdi,%r13 vpinsrq $1,%rsi,%xmm15,%xmm15 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r12 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 leaq -9(%r14),%r14 vmovdqa 0(%r12,%r14,8),%xmm14 vmovdqa 16(%r12,%r14,8),%xmm13 vmovdqa 32(%r12,%r14,8),%xmm12 subq $-64,%r13 movl 0(%r15),%eax leaq (%rsi,%r13,1),%r12 movl 4(%r15),%ebx cmpq %rdx,%r13 movl 8(%r15),%ecx cmoveq %rsp,%r12 movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqu 0-128(%rdi),%xmm10 jmp L$oop_avx2 .p2align 4 L$oop_avx2: vmovdqa K256+512(%rip),%ymm7 vmovdqu -64+0(%rsi,%r13,1),%xmm0 vmovdqu 
-64+16(%rsi,%r13,1),%xmm1 vmovdqu -64+32(%rsi,%r13,1),%xmm2 vmovdqu -64+48(%rsi,%r13,1),%xmm3 vinserti128 $1,(%r12),%ymm0,%ymm0 vinserti128 $1,16(%r12),%ymm1,%ymm1 vpshufb %ymm7,%ymm0,%ymm0 vinserti128 $1,32(%r12),%ymm2,%ymm2 vpshufb %ymm7,%ymm1,%ymm1 vinserti128 $1,48(%r12),%ymm3,%ymm3 leaq K256(%rip),%rbp vpshufb %ymm7,%ymm2,%ymm2 leaq -64(%r13),%r13 vpaddd 0(%rbp),%ymm0,%ymm4 vpshufb %ymm7,%ymm3,%ymm3 vpaddd 32(%rbp),%ymm1,%ymm5 vpaddd 64(%rbp),%ymm2,%ymm6 vpaddd 96(%rbp),%ymm3,%ymm7 vmovdqa %ymm4,0(%rsp) xorl %r14d,%r14d vmovdqa %ymm5,32(%rsp) movq 120(%rsp),%rsi leaq -64(%rsp),%rsp movq %rsi,-8(%rsp) movl %ebx,%esi vmovdqa %ymm6,0(%rsp) xorl %ecx,%esi vmovdqa %ymm7,32(%rsp) movl %r9d,%r12d subq $-32*4,%rbp jmp L$avx2_00_47 .p2align 4 L$avx2_00_47: vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 leaq -64(%rsp),%rsp pushq 64-8(%rsp) leaq 8(%rsp),%rsp vpalignr $4,%ymm0,%ymm1,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d vpalignr $4,%ymm2,%ymm3,%ymm7 rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d vpsrld $7,%ymm4,%ymm6 andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d vpaddd %ymm7,%ymm0,%ymm0 leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi vpshufd $250,%ymm3,%ymm7 xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d vpsrld $11,%ymm6,%ymm6 addl 4+128(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d vpslld $11,%ymm5,%ymm5 andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d vpaddd %ymm4,%ymm0,%ymm0 xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 8+128(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d vpxor %ymm7,%ymm6,%ymm6 andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d vpshufd $132,%ymm6,%ymm6 leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d vpaddd %ymm6,%ymm0,%ymm0 rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx vpshufd $80,%ymm0,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d vpsrlq $17,%ymm7,%ymm7 addl 12+128(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d vpsrlq $2,%ymm7,%ymm7 andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax vpaddd %ymm6,%ymm0,%ymm0 andl 
%esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d vpaddd 0(%rbp),%ymm0,%ymm6 xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d vmovdqa %ymm6,0(%rsp) vpalignr $4,%ymm1,%ymm2,%ymm4 addl 32+128(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d vpalignr $4,%ymm3,%ymm0,%ymm7 rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx vpsrld $7,%ymm4,%ymm6 andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d vpaddd %ymm7,%ymm1,%ymm1 leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi vpshufd $250,%ymm0,%ymm7 xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d vpsrld $11,%ymm6,%ymm6 addl 36+128(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx vpslld $11,%ymm5,%ymm5 andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d vpaddd %ymm4,%ymm1,%ymm1 xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 40+128(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx vpxor %ymm7,%ymm6,%ymm6 andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d vpshufd $132,%ymm6,%ymm6 leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d vpaddd %ymm6,%ymm1,%ymm1 rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d vpshufd $80,%ymm1,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d vpsrlq $17,%ymm7,%ymm7 addl 44+128(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax vpsrlq $2,%ymm7,%ymm7 andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d vpaddd %ymm6,%ymm1,%ymm1 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d vpaddd 32(%rbp),%ymm1,%ymm6 xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) leaq -64(%rsp),%rsp pushq 64-8(%rsp) leaq 8(%rsp),%rsp vpalignr $4,%ymm2,%ymm3,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d vpalignr $4,%ymm0,%ymm1,%ymm7 rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d vpsrld $7,%ymm4,%ymm6 andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d vpaddd %ymm7,%ymm2,%ymm2 leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d vpsrld 
$3,%ymm4,%ymm7 rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi vpshufd $250,%ymm1,%ymm7 xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d vpsrld $11,%ymm6,%ymm6 addl 4+128(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d vpslld $11,%ymm5,%ymm5 andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d vpaddd %ymm4,%ymm2,%ymm2 xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 8+128(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d vpxor %ymm7,%ymm6,%ymm6 andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d vpshufd $132,%ymm6,%ymm6 leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d vpaddd %ymm6,%ymm2,%ymm2 rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx vpshufd $80,%ymm2,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d vpsrlq $17,%ymm7,%ymm7 addl 12+128(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d vpsrlq $2,%ymm7,%ymm7 andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax vpaddd %ymm6,%ymm2,%ymm2 andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d vpaddd 64(%rbp),%ymm2,%ymm6 xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d vmovdqa %ymm6,0(%rsp) vpalignr $4,%ymm3,%ymm0,%ymm4 addl 32+128(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d vpalignr $4,%ymm1,%ymm2,%ymm7 rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx vpsrld $7,%ymm4,%ymm6 andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d vpaddd %ymm7,%ymm3,%ymm3 leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi vpshufd $250,%ymm2,%ymm7 xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d vpsrld $11,%ymm6,%ymm6 addl 36+128(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx vpslld 
$11,%ymm5,%ymm5 andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d vpaddd %ymm4,%ymm3,%ymm3 xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 40+128(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx vpxor %ymm7,%ymm6,%ymm6 andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d vpshufd $132,%ymm6,%ymm6 leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d vpaddd %ymm6,%ymm3,%ymm3 rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d vpshufd $80,%ymm3,%ymm7 andl %r15d,%esi vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d vpsrlq $17,%ymm7,%ymm7 addl 44+128(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax vpsrlq $2,%ymm7,%ymm7 andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d vpaddd %ymm6,%ymm3,%ymm3 andl %esi,%r15d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d vpaddd 96(%rbp),%ymm3,%ymm6 xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) vmovq %xmm15,%r13 vpextrq $1,%xmm15,%r15 vpand %xmm14,%xmm11,%xmm11 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r13,1) leaq 16(%r13),%r13 leaq 128(%rbp),%rbp cmpb $0,3(%rbp) jne L$avx2_00_47 vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 addl 0+64(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+64(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+64(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl 
$6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+64(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+64(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+64(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+64(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44+64(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d addl 0(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 
4(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal 
(%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vpextrq $1,%xmm15,%r12 vmovq %xmm15,%r13 movq 552(%rsp),%r15 addl %r14d,%eax leaq 448(%rsp),%rbp vpand %xmm14,%xmm11,%xmm11 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r13),%r13 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) cmpq 80(%rbp),%r13 je L$done_avx2 xorl %r14d,%r14d movl %ebx,%esi movl %r9d,%r12d xorl %ecx,%esi jmp L$ower_avx2 .p2align 4 L$ower_avx2: vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 addl 0+16(%rbp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+16(%rbp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+16(%rbp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+16(%rbp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+16(%rbp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d 
leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+16(%rbp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+16(%rbp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44+16(%rbp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d leaq -64(%rbp),%rbp addl 0+16(%rbp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+16(%rbp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+16(%rbp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+16(%rbp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl 
%r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+16(%rbp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+16(%rbp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+16(%rbp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44+16(%rbp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vmovq %xmm15,%r13 vpextrq $1,%xmm15,%r15 vpand %xmm14,%xmm11,%xmm11 vpor %xmm11,%xmm8,%xmm8 leaq -64(%rbp),%rbp vmovdqu %xmm8,(%r15,%r13,1) leaq 16(%r13),%r13 cmpq %rsp,%rbp jae L$ower_avx2 movq 552(%rsp),%r15 leaq 64(%r13),%r13 movq 560(%rsp),%rsi addl %r14d,%eax leaq 448(%rsp),%rsp addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d leaq (%rsi,%r13,1),%r12 addl 28(%r15),%r11d cmpq 64+16(%rsp),%r13 movl %eax,0(%r15) cmoveq %rsp,%r12 movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) jbe L$oop_avx2 leaq (%rsp),%rbp L$done_avx2: movq 64+32(%rbp),%r8 movq 64+56(%rbp),%rsi vmovdqu %xmm8,(%r8) vzeroall movq -48(%rsi),%r15 movq -40(%rsi),%r14 movq -32(%rsi),%r13 movq -24(%rsi),%r12 movq -16(%rsi),%rbp movq -8(%rsi),%rbx leaq (%rsi),%rsp L$epilogue_avx2: .byte 0xf3,0xc3 .p2align 5 aesni_cbc_sha256_enc_shaext: movq 8(%rsp),%r10 leaq K256+128(%rip),%rax movdqu (%r9),%xmm1 
movdqu 16(%r9),%xmm2 movdqa 512-128(%rax),%xmm3 movl 240(%rcx),%r11d subq %rdi,%rsi movups (%rcx),%xmm15 movups (%r8),%xmm6 movups 16(%rcx),%xmm4 leaq 112(%rcx),%rcx pshufd $0x1b,%xmm1,%xmm0 pshufd $0xb1,%xmm1,%xmm1 pshufd $0x1b,%xmm2,%xmm2 movdqa %xmm3,%xmm7 .byte 102,15,58,15,202,8 punpcklqdq %xmm0,%xmm2 jmp L$oop_shaext .p2align 4 L$oop_shaext: movdqu (%r10),%xmm10 movdqu 16(%r10),%xmm11 movdqu 32(%r10),%xmm12 .byte 102,68,15,56,0,211 movdqu 48(%r10),%xmm13 movdqa 0-128(%rax),%xmm0 paddd %xmm10,%xmm0 .byte 102,68,15,56,0,219 movdqa %xmm2,%xmm9 movdqa %xmm1,%xmm8 movups 0(%rdi),%xmm14 xorps %xmm15,%xmm14 xorps %xmm14,%xmm6 movups -80(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movups -64(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,202 movdqa 32-128(%rax),%xmm0 paddd %xmm11,%xmm0 .byte 102,68,15,56,0,227 leaq 64(%r10),%r10 movups -48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movups -32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,202 movdqa 64-128(%rax),%xmm0 paddd %xmm12,%xmm0 .byte 102,68,15,56,0,235 .byte 69,15,56,204,211 movups -16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm13,%xmm3 .byte 102,65,15,58,15,220,4 paddd %xmm3,%xmm10 movups 0(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,202 movdqa 96-128(%rax),%xmm0 paddd %xmm13,%xmm0 .byte 69,15,56,205,213 .byte 69,15,56,204,220 movups 16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movups 32(%rcx),%xmm4 aesenc %xmm5,%xmm6 movdqa %xmm10,%xmm3 .byte 102,65,15,58,15,221,4 paddd %xmm3,%xmm11 .byte 15,56,203,202 movdqa 128-128(%rax),%xmm0 paddd %xmm10,%xmm0 .byte 69,15,56,205,218 .byte 69,15,56,204,229 movups 48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm11,%xmm3 .byte 102,65,15,58,15,218,4 paddd %xmm3,%xmm12 cmpl $11,%r11d jb L$aesenclast1 movups 64(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 80(%rcx),%xmm5 aesenc %xmm4,%xmm6 je L$aesenclast1 movups 96(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 112(%rcx),%xmm5 aesenc %xmm4,%xmm6 L$aesenclast1: aesenclast %xmm5,%xmm6 movups 16-112(%rcx),%xmm4 nop .byte 15,56,203,202 movups 16(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm6,0(%rsi,%rdi,1) xorps %xmm14,%xmm6 movups -80(%rcx),%xmm5 aesenc %xmm4,%xmm6 movdqa 160-128(%rax),%xmm0 paddd %xmm11,%xmm0 .byte 69,15,56,205,227 .byte 69,15,56,204,234 movups -64(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm12,%xmm3 .byte 102,65,15,58,15,219,4 paddd %xmm3,%xmm13 movups -48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 192-128(%rax),%xmm0 paddd %xmm12,%xmm0 .byte 69,15,56,205,236 .byte 69,15,56,204,211 movups -32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm13,%xmm3 .byte 102,65,15,58,15,220,4 paddd %xmm3,%xmm10 movups -16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 224-128(%rax),%xmm0 paddd %xmm13,%xmm0 .byte 69,15,56,205,213 .byte 69,15,56,204,220 movups 0(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm10,%xmm3 .byte 102,65,15,58,15,221,4 paddd %xmm3,%xmm11 movups 16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 256-128(%rax),%xmm0 paddd %xmm10,%xmm0 .byte 69,15,56,205,218 .byte 69,15,56,204,229 movups 32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm11,%xmm3 .byte 102,65,15,58,15,218,4 paddd %xmm3,%xmm12 movups 48(%rcx),%xmm5 aesenc %xmm4,%xmm6 cmpl $11,%r11d jb L$aesenclast2 
movups 64(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 80(%rcx),%xmm5 aesenc %xmm4,%xmm6 je L$aesenclast2 movups 96(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 112(%rcx),%xmm5 aesenc %xmm4,%xmm6 L$aesenclast2: aesenclast %xmm5,%xmm6 movups 16-112(%rcx),%xmm4 nop .byte 15,56,203,202 movups 32(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm6,16(%rsi,%rdi,1) xorps %xmm14,%xmm6 movups -80(%rcx),%xmm5 aesenc %xmm4,%xmm6 movdqa 288-128(%rax),%xmm0 paddd %xmm11,%xmm0 .byte 69,15,56,205,227 .byte 69,15,56,204,234 movups -64(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm12,%xmm3 .byte 102,65,15,58,15,219,4 paddd %xmm3,%xmm13 movups -48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 320-128(%rax),%xmm0 paddd %xmm12,%xmm0 .byte 69,15,56,205,236 .byte 69,15,56,204,211 movups -32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm13,%xmm3 .byte 102,65,15,58,15,220,4 paddd %xmm3,%xmm10 movups -16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 352-128(%rax),%xmm0 paddd %xmm13,%xmm0 .byte 69,15,56,205,213 .byte 69,15,56,204,220 movups 0(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm10,%xmm3 .byte 102,65,15,58,15,221,4 paddd %xmm3,%xmm11 movups 16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 384-128(%rax),%xmm0 paddd %xmm10,%xmm0 .byte 69,15,56,205,218 .byte 69,15,56,204,229 movups 32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm11,%xmm3 .byte 102,65,15,58,15,218,4 paddd %xmm3,%xmm12 movups 48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movdqa 416-128(%rax),%xmm0 paddd %xmm11,%xmm0 .byte 69,15,56,205,227 .byte 69,15,56,204,234 cmpl $11,%r11d jb L$aesenclast3 movups 64(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 80(%rcx),%xmm5 aesenc %xmm4,%xmm6 je L$aesenclast3 movups 96(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 112(%rcx),%xmm5 aesenc %xmm4,%xmm6 L$aesenclast3: aesenclast %xmm5,%xmm6 movups 16-112(%rcx),%xmm4 nop .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm12,%xmm3 .byte 102,65,15,58,15,219,4 paddd %xmm3,%xmm13 movups 48(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm6,32(%rsi,%rdi,1) xorps %xmm14,%xmm6 movups -80(%rcx),%xmm5 aesenc %xmm4,%xmm6 movups -64(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,202 movdqa 448-128(%rax),%xmm0 paddd %xmm12,%xmm0 .byte 69,15,56,205,236 movdqa %xmm7,%xmm3 movups -48(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movups -32(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,202 movdqa 480-128(%rax),%xmm0 paddd %xmm13,%xmm0 movups -16(%rcx),%xmm5 aesenc %xmm4,%xmm6 movups 0(%rcx),%xmm4 aesenc %xmm5,%xmm6 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movups 16(%rcx),%xmm5 aesenc %xmm4,%xmm6 .byte 15,56,203,202 movups 32(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 48(%rcx),%xmm5 aesenc %xmm4,%xmm6 cmpl $11,%r11d jb L$aesenclast4 movups 64(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 80(%rcx),%xmm5 aesenc %xmm4,%xmm6 je L$aesenclast4 movups 96(%rcx),%xmm4 aesenc %xmm5,%xmm6 movups 112(%rcx),%xmm5 aesenc %xmm4,%xmm6 L$aesenclast4: aesenclast %xmm5,%xmm6 movups 16-112(%rcx),%xmm4 nop paddd %xmm9,%xmm2 paddd %xmm8,%xmm1 decq %rdx movups %xmm6,48(%rsi,%rdi,1) leaq 64(%rdi),%rdi jnz L$oop_shaext pshufd $0xb1,%xmm2,%xmm2 pshufd $0x1b,%xmm1,%xmm3 pshufd $0xb1,%xmm1,%xmm1 punpckhqdq %xmm2,%xmm1 .byte 102,15,58,15,211,8 movups %xmm6,(%r8) movdqu %xmm1,(%r9) movdqu %xmm2,16(%r9) .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
65,559
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .section __DATA,__const .p2align 4 one: .quad 1,0 two: .quad 2,0 three: .quad 3,0 four: .quad 4,0 five: .quad 5,0 six: .quad 6,0 seven: .quad 7,0 eight: .quad 8,0 OR_MASK: .long 0x00000000,0x00000000,0x00000000,0x80000000 poly: .quad 0x1, 0xc200000000000000 mask: .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d con1: .long 1,1,1,1 con2: .long 0x1b,0x1b,0x1b,0x1b con3: .byte -1,-1,-1,-1,-1,-1,-1,-1,4,5,6,7,4,5,6,7 and_mask: .long 0,0xffffffff, 0xffffffff, 0xffffffff .text .p2align 4 GFMUL: vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2 vpclmulqdq $0x11,%xmm1,%xmm0,%xmm5 vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3 vpclmulqdq $0x01,%xmm1,%xmm0,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm2,%xmm2 vpxor %xmm3,%xmm5,%xmm5 vpclmulqdq $0x10,poly(%rip),%xmm2,%xmm3 vpshufd $78,%xmm2,%xmm4 vpxor %xmm4,%xmm3,%xmm2 vpclmulqdq $0x10,poly(%rip),%xmm2,%xmm3 vpshufd $78,%xmm2,%xmm4 vpxor %xmm4,%xmm3,%xmm2 vpxor %xmm5,%xmm2,%xmm0 .byte 0xf3,0xc3 .globl _aesgcmsiv_htable_init .private_extern _aesgcmsiv_htable_init .p2align 4 _aesgcmsiv_htable_init: _CET_ENDBR vmovdqa (%rsi),%xmm0 vmovdqa %xmm0,%xmm1 vmovdqa %xmm0,(%rdi) call GFMUL vmovdqa %xmm0,16(%rdi) call GFMUL vmovdqa %xmm0,32(%rdi) call GFMUL vmovdqa %xmm0,48(%rdi) call GFMUL vmovdqa %xmm0,64(%rdi) call GFMUL vmovdqa %xmm0,80(%rdi) call GFMUL vmovdqa %xmm0,96(%rdi) call GFMUL vmovdqa %xmm0,112(%rdi) .byte 0xf3,0xc3 .globl _aesgcmsiv_htable6_init .private_extern _aesgcmsiv_htable6_init .p2align 4 _aesgcmsiv_htable6_init: _CET_ENDBR vmovdqa (%rsi),%xmm0 vmovdqa %xmm0,%xmm1 vmovdqa %xmm0,(%rdi) call GFMUL vmovdqa %xmm0,16(%rdi) call GFMUL vmovdqa %xmm0,32(%rdi) call GFMUL vmovdqa %xmm0,48(%rdi) call GFMUL vmovdqa %xmm0,64(%rdi) call GFMUL vmovdqa %xmm0,80(%rdi) .byte 0xf3,0xc3 .globl _aesgcmsiv_htable_polyval .private_extern _aesgcmsiv_htable_polyval .p2align 4 _aesgcmsiv_htable_polyval: _CET_ENDBR testq %rdx,%rdx jnz L$htable_polyval_start .byte 0xf3,0xc3 L$htable_polyval_start: vzeroall movq %rdx,%r11 andq $127,%r11 jz L$htable_polyval_no_prefix vpxor %xmm9,%xmm9,%xmm9 vmovdqa (%rcx),%xmm1 subq %r11,%rdx subq $16,%r11 vmovdqu (%rsi),%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm5 vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm3 vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm4 vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 leaq 16(%rsi),%rsi testq %r11,%r11 jnz L$htable_polyval_prefix_loop jmp L$htable_polyval_prefix_complete .p2align 6 L$htable_polyval_prefix_loop: subq $16,%r11 vmovdqu (%rsi),%xmm0 vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 testq %r11,%r11 leaq 16(%rsi),%rsi jnz L$htable_polyval_prefix_loop L$htable_polyval_prefix_complete: vpsrldq $8,%xmm5,%xmm6 vpslldq $8,%xmm5,%xmm5 vpxor %xmm6,%xmm4,%xmm9 vpxor %xmm5,%xmm3,%xmm1 jmp L$htable_polyval_main_loop L$htable_polyval_no_prefix: vpxor %xmm1,%xmm1,%xmm1 vmovdqa (%rcx),%xmm9 .p2align 6 L$htable_polyval_main_loop: subq $0x80,%rdx jb L$htable_polyval_out vmovdqu 112(%rsi),%xmm0 vpclmulqdq $0x01,(%rdi),%xmm0,%xmm5 vpclmulqdq $0x00,(%rdi),%xmm0,%xmm3 vpclmulqdq $0x11,(%rdi),%xmm0,%xmm4 vpclmulqdq 
$0x10,(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 96(%rsi),%xmm0 vpclmulqdq $0x01,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 80(%rsi),%xmm0 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm7 vpalignr $8,%xmm1,%xmm1,%xmm1 vpclmulqdq $0x01,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm7,%xmm1,%xmm1 vmovdqu 64(%rsi),%xmm0 vpclmulqdq $0x01,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 48(%rsi),%xmm0 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm7 vpalignr $8,%xmm1,%xmm1,%xmm1 vpclmulqdq $0x01,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm7,%xmm1,%xmm1 vmovdqu 32(%rsi),%xmm0 vpclmulqdq $0x01,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm9,%xmm1,%xmm1 vmovdqu 16(%rsi),%xmm0 vpclmulqdq $0x01,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 0(%rsi),%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpclmulqdq $0x01,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpsrldq $8,%xmm5,%xmm6 vpslldq $8,%xmm5,%xmm5 vpxor %xmm6,%xmm4,%xmm9 vpxor %xmm5,%xmm3,%xmm1 leaq 128(%rsi),%rsi jmp L$htable_polyval_main_loop L$htable_polyval_out: vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm6 vpalignr $8,%xmm1,%xmm1,%xmm1 vpxor %xmm6,%xmm1,%xmm1 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm6 vpalignr $8,%xmm1,%xmm1,%xmm1 vpxor %xmm6,%xmm1,%xmm1 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rcx) vzeroupper .byte 0xf3,0xc3 .globl _aesgcmsiv_polyval_horner .private_extern _aesgcmsiv_polyval_horner .p2align 4 _aesgcmsiv_polyval_horner: _CET_ENDBR testq %rcx,%rcx jnz L$polyval_horner_start .byte 0xf3,0xc3 L$polyval_horner_start: xorq %r10,%r10 shlq $4,%rcx vmovdqa (%rsi),%xmm1 vmovdqa (%rdi),%xmm0 L$polyval_horner_loop: vpxor (%rdx,%r10,1),%xmm0,%xmm0 call GFMUL addq $16,%r10 cmpq %r10,%rcx jne L$polyval_horner_loop vmovdqa %xmm0,(%rdi) .byte 0xf3,0xc3 .globl _aes128gcmsiv_aes_ks .private_extern _aes128gcmsiv_aes_ks .p2align 4 _aes128gcmsiv_aes_ks: _CET_ENDBR vmovdqu (%rdi),%xmm1 vmovdqa %xmm1,(%rsi) vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 movq $8,%rax L$ks128_loop: addq $16,%rsi subq $1,%rax vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa 
%xmm1,(%rsi) jne L$ks128_loop vmovdqa con2(%rip),%xmm0 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,16(%rsi) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,32(%rsi) .byte 0xf3,0xc3 .globl _aes256gcmsiv_aes_ks .private_extern _aes256gcmsiv_aes_ks .p2align 4 _aes256gcmsiv_aes_ks: _CET_ENDBR vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm3 vmovdqa %xmm1,(%rsi) vmovdqa %xmm3,16(%rsi) vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vpxor %xmm14,%xmm14,%xmm14 movq $6,%rax L$ks256_loop: addq $32,%rsi subq $1,%rax vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpsllq $32,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpshufb con3(%rip),%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vmovdqa %xmm3,16(%rsi) jne L$ks256_loop vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpsllq $32,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,32(%rsi) .byte 0xf3,0xc3 .globl _aes128gcmsiv_aes_ks_enc_x1 .private_extern _aes128gcmsiv_aes_ks_enc_x1 .p2align 4 _aes128gcmsiv_aes_ks_enc_x1: _CET_ENDBR vmovdqa (%rcx),%xmm1 vmovdqa 0(%rdi),%xmm4 vmovdqa %xmm1,(%rdx) vpxor %xmm1,%xmm4,%xmm4 vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,16(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,32(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,48(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,64(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,80(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,96(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor 
%xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,112(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,128(%rdx) vmovdqa con2(%rip),%xmm0 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,144(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenclast %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,160(%rdx) vmovdqa %xmm4,0(%rsi) .byte 0xf3,0xc3 .globl _aes128gcmsiv_kdf .private_extern _aes128gcmsiv_kdf .p2align 4 _aes128gcmsiv_kdf: _CET_ENDBR vmovdqa (%rdx),%xmm1 vmovdqa 0(%rdi),%xmm9 vmovdqa and_mask(%rip),%xmm12 vmovdqa one(%rip),%xmm13 vpshufd $0x90,%xmm9,%xmm9 vpand %xmm12,%xmm9,%xmm9 vpaddd %xmm13,%xmm9,%xmm10 vpaddd %xmm13,%xmm10,%xmm11 vpaddd %xmm13,%xmm11,%xmm12 vpxor %xmm1,%xmm9,%xmm9 vpxor %xmm1,%xmm10,%xmm10 vpxor %xmm1,%xmm11,%xmm11 vpxor %xmm1,%xmm12,%xmm12 vmovdqa 16(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 32(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 48(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 64(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 80(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 96(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 112(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 128(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 144(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 160(%rdx),%xmm2 vaesenclast %xmm2,%xmm9,%xmm9 vaesenclast %xmm2,%xmm10,%xmm10 vaesenclast %xmm2,%xmm11,%xmm11 vaesenclast %xmm2,%xmm12,%xmm12 vmovdqa %xmm9,0(%rsi) vmovdqa %xmm10,16(%rsi) vmovdqa %xmm11,32(%rsi) vmovdqa %xmm12,48(%rsi) .byte 0xf3,0xc3 .globl _aes128gcmsiv_enc_msg_x4 .private_extern _aes128gcmsiv_enc_msg_x4 .p2align 4 _aes128gcmsiv_enc_msg_x4: _CET_ENDBR testq %r8,%r8 jnz L$128_enc_msg_x4_start .byte 0xf3,0xc3 L$128_enc_msg_x4_start: pushq %r12 pushq %r13 shrq $4,%r8 movq %r8,%r10 shlq $62,%r10 shrq $62,%r10 vmovdqa (%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 vmovdqu four(%rip),%xmm4 vmovdqa %xmm15,%xmm0 vpaddd one(%rip),%xmm15,%xmm1 vpaddd two(%rip),%xmm15,%xmm2 vpaddd three(%rip),%xmm15,%xmm3 shrq $2,%r8 je L$128_enc_msg_x4_check_remainder subq $64,%rsi subq $64,%rdi L$128_enc_msg_x4_loop1: addq $64,%rsi addq $64,%rdi vmovdqa %xmm0,%xmm5 vmovdqa %xmm1,%xmm6 vmovdqa %xmm2,%xmm7 vmovdqa %xmm3,%xmm8 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 
vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm0,%xmm0 vmovdqu 32(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm1,%xmm1 vmovdqu 48(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm2,%xmm2 vmovdqu 64(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm3,%xmm3 vmovdqu 80(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 96(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 112(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 128(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 144(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm12 vaesenclast %xmm12,%xmm5,%xmm5 vaesenclast %xmm12,%xmm6,%xmm6 vaesenclast %xmm12,%xmm7,%xmm7 vaesenclast %xmm12,%xmm8,%xmm8 vpxor 0(%rdi),%xmm5,%xmm5 vpxor 16(%rdi),%xmm6,%xmm6 vpxor 32(%rdi),%xmm7,%xmm7 vpxor 48(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm5,0(%rsi) vmovdqu %xmm6,16(%rsi) vmovdqu %xmm7,32(%rsi) vmovdqu %xmm8,48(%rsi) jne L$128_enc_msg_x4_loop1 addq $64,%rsi addq $64,%rdi L$128_enc_msg_x4_check_remainder: cmpq $0,%r10 je L$128_enc_msg_x4_out L$128_enc_msg_x4_loop2: vmovdqa %xmm0,%xmm5 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm5,%xmm5 vaesenc 16(%rcx),%xmm5,%xmm5 vaesenc 32(%rcx),%xmm5,%xmm5 vaesenc 48(%rcx),%xmm5,%xmm5 vaesenc 64(%rcx),%xmm5,%xmm5 vaesenc 80(%rcx),%xmm5,%xmm5 vaesenc 96(%rcx),%xmm5,%xmm5 vaesenc 112(%rcx),%xmm5,%xmm5 vaesenc 128(%rcx),%xmm5,%xmm5 vaesenc 144(%rcx),%xmm5,%xmm5 vaesenclast 160(%rcx),%xmm5,%xmm5 vpxor (%rdi),%xmm5,%xmm5 vmovdqu %xmm5,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jne L$128_enc_msg_x4_loop2 L$128_enc_msg_x4_out: popq %r13 popq %r12 .byte 0xf3,0xc3 .globl _aes128gcmsiv_enc_msg_x8 .private_extern _aes128gcmsiv_enc_msg_x8 .p2align 4 _aes128gcmsiv_enc_msg_x8: _CET_ENDBR testq %r8,%r8 jnz L$128_enc_msg_x8_start .byte 0xf3,0xc3 L$128_enc_msg_x8_start: pushq %r12 pushq %r13 pushq %rbp movq %rsp,%rbp subq $128,%rsp andq $-64,%rsp shrq $4,%r8 movq %r8,%r10 shlq $61,%r10 shrq $61,%r10 vmovdqu (%rdx),%xmm1 vpor OR_MASK(%rip),%xmm1,%xmm1 vpaddd seven(%rip),%xmm1,%xmm0 vmovdqu %xmm0,(%rsp) vpaddd one(%rip),%xmm1,%xmm9 vpaddd two(%rip),%xmm1,%xmm10 vpaddd three(%rip),%xmm1,%xmm11 vpaddd four(%rip),%xmm1,%xmm12 vpaddd five(%rip),%xmm1,%xmm13 vpaddd six(%rip),%xmm1,%xmm14 vmovdqa %xmm1,%xmm0 shrq $3,%r8 je L$128_enc_msg_x8_check_remainder subq $128,%rsi subq $128,%rdi L$128_enc_msg_x8_loop1: addq $128,%rsi addq $128,%rdi vmovdqa %xmm0,%xmm1 vmovdqa %xmm9,%xmm2 vmovdqa %xmm10,%xmm3 vmovdqa %xmm11,%xmm4 vmovdqa %xmm12,%xmm5 vmovdqa %xmm13,%xmm6 vmovdqa %xmm14,%xmm7 vmovdqu (%rsp),%xmm8 vpxor (%rcx),%xmm1,%xmm1 vpxor (%rcx),%xmm2,%xmm2 vpxor (%rcx),%xmm3,%xmm3 vpxor (%rcx),%xmm4,%xmm4 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm15 vaesenc 
%xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu (%rsp),%xmm14 vpaddd eight(%rip),%xmm14,%xmm14 vmovdqu %xmm14,(%rsp) vmovdqu 32(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpsubd one(%rip),%xmm14,%xmm14 vmovdqu 48(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm0,%xmm0 vmovdqu 64(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm9,%xmm9 vmovdqu 80(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm10,%xmm10 vmovdqu 96(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm11,%xmm11 vmovdqu 112(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm12,%xmm12 vmovdqu 128(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm13,%xmm13 vmovdqu 144(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm15 vaesenclast %xmm15,%xmm1,%xmm1 vaesenclast %xmm15,%xmm2,%xmm2 vaesenclast %xmm15,%xmm3,%xmm3 vaesenclast %xmm15,%xmm4,%xmm4 vaesenclast %xmm15,%xmm5,%xmm5 vaesenclast %xmm15,%xmm6,%xmm6 vaesenclast %xmm15,%xmm7,%xmm7 vaesenclast %xmm15,%xmm8,%xmm8 vpxor 0(%rdi),%xmm1,%xmm1 vpxor 16(%rdi),%xmm2,%xmm2 vpxor 32(%rdi),%xmm3,%xmm3 vpxor 48(%rdi),%xmm4,%xmm4 vpxor 64(%rdi),%xmm5,%xmm5 vpxor 80(%rdi),%xmm6,%xmm6 vpxor 96(%rdi),%xmm7,%xmm7 vpxor 112(%rdi),%xmm8,%xmm8 decq %r8 vmovdqu %xmm1,0(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) vmovdqu %xmm8,112(%rsi) jne L$128_enc_msg_x8_loop1 addq $128,%rsi addq $128,%rdi L$128_enc_msg_x8_check_remainder: cmpq $0,%r10 je L$128_enc_msg_x8_out L$128_enc_msg_x8_loop2: vmovdqa %xmm0,%xmm1 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm1,%xmm1 vaesenc 16(%rcx),%xmm1,%xmm1 vaesenc 32(%rcx),%xmm1,%xmm1 vaesenc 48(%rcx),%xmm1,%xmm1 vaesenc 64(%rcx),%xmm1,%xmm1 vaesenc 80(%rcx),%xmm1,%xmm1 vaesenc 96(%rcx),%xmm1,%xmm1 vaesenc 112(%rcx),%xmm1,%xmm1 vaesenc 
128(%rcx),%xmm1,%xmm1 vaesenc 144(%rcx),%xmm1,%xmm1 vaesenclast 160(%rcx),%xmm1,%xmm1 vpxor (%rdi),%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $16,%rdi addq $16,%rsi decq %r10 jne L$128_enc_msg_x8_loop2 L$128_enc_msg_x8_out: movq %rbp,%rsp popq %rbp popq %r13 popq %r12 .byte 0xf3,0xc3 .globl _aes128gcmsiv_dec .private_extern _aes128gcmsiv_dec .p2align 4 _aes128gcmsiv_dec: _CET_ENDBR testq $~15,%r9 jnz L$128_dec_start .byte 0xf3,0xc3 L$128_dec_start: vzeroupper vmovdqa (%rdx),%xmm0 vmovdqu 16(%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 movq %rdx,%rax leaq 32(%rax),%rax leaq 32(%rcx),%rcx andq $~15,%r9 cmpq $96,%r9 jb L$128_dec_loop2 subq $96,%r9 vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vpxor (%r8),%xmm7,%xmm7 vpxor (%r8),%xmm8,%xmm8 vpxor (%r8),%xmm9,%xmm9 vpxor (%r8),%xmm10,%xmm10 vpxor (%r8),%xmm11,%xmm11 vpxor (%r8),%xmm12,%xmm12 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vaesenclast %xmm4,%xmm8,%xmm8 vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm4,%xmm10,%xmm10 vaesenclast %xmm4,%xmm11,%xmm11 vaesenclast %xmm4,%xmm12,%xmm12 vpxor 0(%rdi),%xmm7,%xmm7 vpxor 16(%rdi),%xmm8,%xmm8 vpxor 32(%rdi),%xmm9,%xmm9 vpxor 48(%rdi),%xmm10,%xmm10 vpxor 64(%rdi),%xmm11,%xmm11 vpxor 80(%rdi),%xmm12,%xmm12 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) addq $96,%rdi addq $96,%rsi jmp L$128_dec_loop1 .p2align 6 L$128_dec_loop1: cmpq $96,%r9 jb L$128_dec_finish_96 subq $96,%r9 vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd 
one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vmovdqa (%r8),%xmm4 vpxor %xmm4,%xmm7,%xmm7 vpxor %xmm4,%xmm8,%xmm8 vpxor %xmm4,%xmm9,%xmm9 vpxor %xmm4,%xmm10,%xmm10 vpxor %xmm4,%xmm11,%xmm11 vpxor %xmm4,%xmm12,%xmm12 vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqa 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vmovdqu 144(%r8),%xmm4 
vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm6 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor 0(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vpxor 16(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm8,%xmm8 vpxor 32(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm9,%xmm9 vpxor 48(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm10,%xmm10 vpxor 64(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm11,%xmm11 vpxor 80(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm12,%xmm12 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) vpxor %xmm5,%xmm0,%xmm0 leaq 96(%rdi),%rdi leaq 96(%rsi),%rsi jmp L$128_dec_loop1 L$128_dec_finish_96: vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor %xmm5,%xmm0,%xmm0 L$128_dec_loop2: cmpq $16,%r9 jb L$128_dec_out subq $16,%r9 vmovdqa %xmm15,%xmm2 vpaddd one(%rip),%xmm15,%xmm15 vpxor 0(%r8),%xmm2,%xmm2 vaesenc 16(%r8),%xmm2,%xmm2 vaesenc 32(%r8),%xmm2,%xmm2 vaesenc 48(%r8),%xmm2,%xmm2 vaesenc 64(%r8),%xmm2,%xmm2 vaesenc 80(%r8),%xmm2,%xmm2 vaesenc 96(%r8),%xmm2,%xmm2 vaesenc 112(%r8),%xmm2,%xmm2 vaesenc 128(%r8),%xmm2,%xmm2 vaesenc 144(%r8),%xmm2,%xmm2 vaesenclast 160(%r8),%xmm2,%xmm2 vpxor (%rdi),%xmm2,%xmm2 vmovdqu %xmm2,(%rsi) addq 
$16,%rdi addq $16,%rsi vpxor %xmm2,%xmm0,%xmm0 vmovdqa -32(%rcx),%xmm1 call GFMUL jmp L$128_dec_loop2 L$128_dec_out: vmovdqu %xmm0,(%rdx) .byte 0xf3,0xc3 .globl _aes128gcmsiv_ecb_enc_block .private_extern _aes128gcmsiv_ecb_enc_block .p2align 4 _aes128gcmsiv_ecb_enc_block: _CET_ENDBR vmovdqa (%rdi),%xmm1 vpxor (%rdx),%xmm1,%xmm1 vaesenc 16(%rdx),%xmm1,%xmm1 vaesenc 32(%rdx),%xmm1,%xmm1 vaesenc 48(%rdx),%xmm1,%xmm1 vaesenc 64(%rdx),%xmm1,%xmm1 vaesenc 80(%rdx),%xmm1,%xmm1 vaesenc 96(%rdx),%xmm1,%xmm1 vaesenc 112(%rdx),%xmm1,%xmm1 vaesenc 128(%rdx),%xmm1,%xmm1 vaesenc 144(%rdx),%xmm1,%xmm1 vaesenclast 160(%rdx),%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) .byte 0xf3,0xc3 .globl _aes256gcmsiv_aes_ks_enc_x1 .private_extern _aes256gcmsiv_aes_ks_enc_x1 .p2align 4 _aes256gcmsiv_aes_ks_enc_x1: _CET_ENDBR vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vmovdqa (%rdi),%xmm8 vmovdqa (%rcx),%xmm1 vmovdqa 16(%rcx),%xmm3 vpxor %xmm1,%xmm8,%xmm8 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm1,(%rdx) vmovdqu %xmm3,16(%rdx) vpxor %xmm14,%xmm14,%xmm14 vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,32(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,48(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,64(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,80(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,96(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,112(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,128(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,144(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc 
%xmm1,%xmm8,%xmm8 vmovdqu %xmm1,160(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,176(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,192(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,208(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenclast %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,224(%rdx) vmovdqa %xmm8,(%rsi) .byte 0xf3,0xc3 .globl _aes256gcmsiv_ecb_enc_block .private_extern _aes256gcmsiv_ecb_enc_block .p2align 4 _aes256gcmsiv_ecb_enc_block: _CET_ENDBR vmovdqa (%rdi),%xmm1 vpxor (%rdx),%xmm1,%xmm1 vaesenc 16(%rdx),%xmm1,%xmm1 vaesenc 32(%rdx),%xmm1,%xmm1 vaesenc 48(%rdx),%xmm1,%xmm1 vaesenc 64(%rdx),%xmm1,%xmm1 vaesenc 80(%rdx),%xmm1,%xmm1 vaesenc 96(%rdx),%xmm1,%xmm1 vaesenc 112(%rdx),%xmm1,%xmm1 vaesenc 128(%rdx),%xmm1,%xmm1 vaesenc 144(%rdx),%xmm1,%xmm1 vaesenc 160(%rdx),%xmm1,%xmm1 vaesenc 176(%rdx),%xmm1,%xmm1 vaesenc 192(%rdx),%xmm1,%xmm1 vaesenc 208(%rdx),%xmm1,%xmm1 vaesenclast 224(%rdx),%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) .byte 0xf3,0xc3 .globl _aes256gcmsiv_enc_msg_x4 .private_extern _aes256gcmsiv_enc_msg_x4 .p2align 4 _aes256gcmsiv_enc_msg_x4: _CET_ENDBR testq %r8,%r8 jnz L$256_enc_msg_x4_start .byte 0xf3,0xc3 L$256_enc_msg_x4_start: movq %r8,%r10 shrq $4,%r8 shlq $60,%r10 jz L$256_enc_msg_x4_start2 addq $1,%r8 L$256_enc_msg_x4_start2: movq %r8,%r10 shlq $62,%r10 shrq $62,%r10 vmovdqa (%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 vmovdqa four(%rip),%xmm4 vmovdqa %xmm15,%xmm0 vpaddd one(%rip),%xmm15,%xmm1 vpaddd two(%rip),%xmm15,%xmm2 vpaddd three(%rip),%xmm15,%xmm3 shrq $2,%r8 je L$256_enc_msg_x4_check_remainder subq $64,%rsi subq $64,%rdi L$256_enc_msg_x4_loop1: addq $64,%rsi addq $64,%rdi vmovdqa %xmm0,%xmm5 vmovdqa %xmm1,%xmm6 vmovdqa %xmm2,%xmm7 vmovdqa %xmm3,%xmm8 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm0,%xmm0 vmovdqu 32(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm1,%xmm1 vmovdqu 48(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm2,%xmm2 vmovdqu 64(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm3,%xmm3 vmovdqu 80(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 96(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc 
%xmm12,%xmm8,%xmm8 vmovdqu 112(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 128(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 144(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 176(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 192(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 208(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 224(%rcx),%xmm12 vaesenclast %xmm12,%xmm5,%xmm5 vaesenclast %xmm12,%xmm6,%xmm6 vaesenclast %xmm12,%xmm7,%xmm7 vaesenclast %xmm12,%xmm8,%xmm8 vpxor 0(%rdi),%xmm5,%xmm5 vpxor 16(%rdi),%xmm6,%xmm6 vpxor 32(%rdi),%xmm7,%xmm7 vpxor 48(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm5,0(%rsi) vmovdqu %xmm6,16(%rsi) vmovdqu %xmm7,32(%rsi) vmovdqu %xmm8,48(%rsi) jne L$256_enc_msg_x4_loop1 addq $64,%rsi addq $64,%rdi L$256_enc_msg_x4_check_remainder: cmpq $0,%r10 je L$256_enc_msg_x4_out L$256_enc_msg_x4_loop2: vmovdqa %xmm0,%xmm5 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm5,%xmm5 vaesenc 16(%rcx),%xmm5,%xmm5 vaesenc 32(%rcx),%xmm5,%xmm5 vaesenc 48(%rcx),%xmm5,%xmm5 vaesenc 64(%rcx),%xmm5,%xmm5 vaesenc 80(%rcx),%xmm5,%xmm5 vaesenc 96(%rcx),%xmm5,%xmm5 vaesenc 112(%rcx),%xmm5,%xmm5 vaesenc 128(%rcx),%xmm5,%xmm5 vaesenc 144(%rcx),%xmm5,%xmm5 vaesenc 160(%rcx),%xmm5,%xmm5 vaesenc 176(%rcx),%xmm5,%xmm5 vaesenc 192(%rcx),%xmm5,%xmm5 vaesenc 208(%rcx),%xmm5,%xmm5 vaesenclast 224(%rcx),%xmm5,%xmm5 vpxor (%rdi),%xmm5,%xmm5 vmovdqu %xmm5,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jne L$256_enc_msg_x4_loop2 L$256_enc_msg_x4_out: .byte 0xf3,0xc3 .globl _aes256gcmsiv_enc_msg_x8 .private_extern _aes256gcmsiv_enc_msg_x8 .p2align 4 _aes256gcmsiv_enc_msg_x8: _CET_ENDBR testq %r8,%r8 jnz L$256_enc_msg_x8_start .byte 0xf3,0xc3 L$256_enc_msg_x8_start: movq %rsp,%r11 subq $16,%r11 andq $-64,%r11 movq %r8,%r10 shrq $4,%r8 shlq $60,%r10 jz L$256_enc_msg_x8_start2 addq $1,%r8 L$256_enc_msg_x8_start2: movq %r8,%r10 shlq $61,%r10 shrq $61,%r10 vmovdqa (%rdx),%xmm1 vpor OR_MASK(%rip),%xmm1,%xmm1 vpaddd seven(%rip),%xmm1,%xmm0 vmovdqa %xmm0,(%r11) vpaddd one(%rip),%xmm1,%xmm9 vpaddd two(%rip),%xmm1,%xmm10 vpaddd three(%rip),%xmm1,%xmm11 vpaddd four(%rip),%xmm1,%xmm12 vpaddd five(%rip),%xmm1,%xmm13 vpaddd six(%rip),%xmm1,%xmm14 vmovdqa %xmm1,%xmm0 shrq $3,%r8 jz L$256_enc_msg_x8_check_remainder subq $128,%rsi subq $128,%rdi L$256_enc_msg_x8_loop1: addq $128,%rsi addq $128,%rdi vmovdqa %xmm0,%xmm1 vmovdqa %xmm9,%xmm2 vmovdqa %xmm10,%xmm3 vmovdqa %xmm11,%xmm4 vmovdqa %xmm12,%xmm5 vmovdqa %xmm13,%xmm6 vmovdqa %xmm14,%xmm7 vmovdqa (%r11),%xmm8 vpxor (%rcx),%xmm1,%xmm1 vpxor (%rcx),%xmm2,%xmm2 vpxor (%rcx),%xmm3,%xmm3 vpxor (%rcx),%xmm4,%xmm4 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqa 
(%r11),%xmm14 vpaddd eight(%rip),%xmm14,%xmm14 vmovdqa %xmm14,(%r11) vmovdqu 32(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpsubd one(%rip),%xmm14,%xmm14 vmovdqu 48(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm0,%xmm0 vmovdqu 64(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm9,%xmm9 vmovdqu 80(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm10,%xmm10 vmovdqu 96(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm11,%xmm11 vmovdqu 112(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm12,%xmm12 vmovdqu 128(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm13,%xmm13 vmovdqu 144(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 176(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 192(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 208(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 224(%rcx),%xmm15 vaesenclast %xmm15,%xmm1,%xmm1 vaesenclast %xmm15,%xmm2,%xmm2 vaesenclast %xmm15,%xmm3,%xmm3 vaesenclast %xmm15,%xmm4,%xmm4 vaesenclast %xmm15,%xmm5,%xmm5 vaesenclast %xmm15,%xmm6,%xmm6 vaesenclast %xmm15,%xmm7,%xmm7 vaesenclast %xmm15,%xmm8,%xmm8 vpxor 0(%rdi),%xmm1,%xmm1 vpxor 16(%rdi),%xmm2,%xmm2 vpxor 32(%rdi),%xmm3,%xmm3 vpxor 
48(%rdi),%xmm4,%xmm4 vpxor 64(%rdi),%xmm5,%xmm5 vpxor 80(%rdi),%xmm6,%xmm6 vpxor 96(%rdi),%xmm7,%xmm7 vpxor 112(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm1,0(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) vmovdqu %xmm8,112(%rsi) jne L$256_enc_msg_x8_loop1 addq $128,%rsi addq $128,%rdi L$256_enc_msg_x8_check_remainder: cmpq $0,%r10 je L$256_enc_msg_x8_out L$256_enc_msg_x8_loop2: vmovdqa %xmm0,%xmm1 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm1,%xmm1 vaesenc 16(%rcx),%xmm1,%xmm1 vaesenc 32(%rcx),%xmm1,%xmm1 vaesenc 48(%rcx),%xmm1,%xmm1 vaesenc 64(%rcx),%xmm1,%xmm1 vaesenc 80(%rcx),%xmm1,%xmm1 vaesenc 96(%rcx),%xmm1,%xmm1 vaesenc 112(%rcx),%xmm1,%xmm1 vaesenc 128(%rcx),%xmm1,%xmm1 vaesenc 144(%rcx),%xmm1,%xmm1 vaesenc 160(%rcx),%xmm1,%xmm1 vaesenc 176(%rcx),%xmm1,%xmm1 vaesenc 192(%rcx),%xmm1,%xmm1 vaesenc 208(%rcx),%xmm1,%xmm1 vaesenclast 224(%rcx),%xmm1,%xmm1 vpxor (%rdi),%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jnz L$256_enc_msg_x8_loop2 L$256_enc_msg_x8_out: .byte 0xf3,0xc3 .globl _aes256gcmsiv_dec .private_extern _aes256gcmsiv_dec .p2align 4 _aes256gcmsiv_dec: _CET_ENDBR testq $~15,%r9 jnz L$256_dec_start .byte 0xf3,0xc3 L$256_dec_start: vzeroupper vmovdqa (%rdx),%xmm0 vmovdqu 16(%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 movq %rdx,%rax leaq 32(%rax),%rax leaq 32(%rcx),%rcx andq $~15,%r9 cmpq $96,%r9 jb L$256_dec_loop2 subq $96,%r9 vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vpxor (%r8),%xmm7,%xmm7 vpxor (%r8),%xmm8,%xmm8 vpxor (%r8),%xmm9,%xmm9 vpxor (%r8),%xmm10,%xmm10 vpxor (%r8),%xmm11,%xmm11 vpxor (%r8),%xmm12,%xmm12 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 
vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 176(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 192(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 208(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 224(%r8),%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vaesenclast %xmm4,%xmm8,%xmm8 vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm4,%xmm10,%xmm10 vaesenclast %xmm4,%xmm11,%xmm11 vaesenclast %xmm4,%xmm12,%xmm12 vpxor 0(%rdi),%xmm7,%xmm7 vpxor 16(%rdi),%xmm8,%xmm8 vpxor 32(%rdi),%xmm9,%xmm9 vpxor 48(%rdi),%xmm10,%xmm10 vpxor 64(%rdi),%xmm11,%xmm11 vpxor 80(%rdi),%xmm12,%xmm12 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) addq $96,%rdi addq $96,%rsi jmp L$256_dec_loop1 .p2align 6 L$256_dec_loop1: cmpq $96,%r9 jb L$256_dec_finish_96 subq $96,%r9 vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vmovdqa (%r8),%xmm4 vpxor %xmm4,%xmm7,%xmm7 vpxor %xmm4,%xmm8,%xmm8 vpxor %xmm4,%xmm9,%xmm9 vpxor %xmm4,%xmm10,%xmm10 vpxor %xmm4,%xmm11,%xmm11 vpxor %xmm4,%xmm12,%xmm12 vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc 
%xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqa 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 176(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 192(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 208(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 224(%r8),%xmm6 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor 0(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vpxor 16(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm8,%xmm8 vpxor 32(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm9,%xmm9 vpxor 48(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm10,%xmm10 vpxor 64(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm11,%xmm11 vpxor 80(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm12,%xmm12 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) vpxor %xmm5,%xmm0,%xmm0 leaq 96(%rdi),%rdi leaq 96(%rsi),%rsi jmp L$256_dec_loop1 L$256_dec_finish_96: vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq 
$0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor %xmm5,%xmm0,%xmm0 L$256_dec_loop2: cmpq $16,%r9 jb L$256_dec_out subq $16,%r9 vmovdqa %xmm15,%xmm2 vpaddd one(%rip),%xmm15,%xmm15 vpxor 0(%r8),%xmm2,%xmm2 vaesenc 16(%r8),%xmm2,%xmm2 vaesenc 32(%r8),%xmm2,%xmm2 vaesenc 48(%r8),%xmm2,%xmm2 vaesenc 64(%r8),%xmm2,%xmm2 vaesenc 80(%r8),%xmm2,%xmm2 vaesenc 96(%r8),%xmm2,%xmm2 vaesenc 112(%r8),%xmm2,%xmm2 vaesenc 128(%r8),%xmm2,%xmm2 vaesenc 144(%r8),%xmm2,%xmm2 vaesenc 160(%r8),%xmm2,%xmm2 vaesenc 176(%r8),%xmm2,%xmm2 vaesenc 192(%r8),%xmm2,%xmm2 vaesenc 208(%r8),%xmm2,%xmm2 vaesenclast 224(%r8),%xmm2,%xmm2 vpxor (%rdi),%xmm2,%xmm2 vmovdqu %xmm2,(%rsi) addq $16,%rdi addq $16,%rsi vpxor %xmm2,%xmm0,%xmm0 vmovdqa -32(%rcx),%xmm1 call GFMUL jmp L$256_dec_loop2 L$256_dec_out: vmovdqu %xmm0,(%rdx) .byte 0xf3,0xc3 .globl _aes256gcmsiv_kdf .private_extern _aes256gcmsiv_kdf .p2align 4 _aes256gcmsiv_kdf: _CET_ENDBR vmovdqa (%rdx),%xmm1 vmovdqa 0(%rdi),%xmm4 vmovdqa and_mask(%rip),%xmm11 vmovdqa one(%rip),%xmm8 vpshufd $0x90,%xmm4,%xmm4 vpand %xmm11,%xmm4,%xmm4 vpaddd %xmm8,%xmm4,%xmm6 vpaddd %xmm8,%xmm6,%xmm7 vpaddd %xmm8,%xmm7,%xmm11 vpaddd %xmm8,%xmm11,%xmm12 vpaddd %xmm8,%xmm12,%xmm13 vpxor %xmm1,%xmm4,%xmm4 vpxor %xmm1,%xmm6,%xmm6 vpxor %xmm1,%xmm7,%xmm7 vpxor %xmm1,%xmm11,%xmm11 vpxor %xmm1,%xmm12,%xmm12 vpxor %xmm1,%xmm13,%xmm13 vmovdqa 16(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 32(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 48(%rdx),%xmm1 vaesenc 
%xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 64(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 80(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 96(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 112(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 128(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 144(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 160(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 176(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 192(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 208(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 224(%rdx),%xmm2 vaesenclast %xmm2,%xmm4,%xmm4 vaesenclast %xmm2,%xmm6,%xmm6 vaesenclast %xmm2,%xmm7,%xmm7 vaesenclast %xmm2,%xmm11,%xmm11 vaesenclast %xmm2,%xmm12,%xmm12 vaesenclast %xmm2,%xmm13,%xmm13 vmovdqa %xmm4,0(%rsi) vmovdqa %xmm6,16(%rsi) vmovdqa %xmm7,32(%rsi) vmovdqa %xmm11,48(%rsi) vmovdqa %xmm12,64(%rsi) vmovdqa %xmm13,80(%rsi) .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
57,255
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86_64/crypto/cipher_extra/aesni-sha1-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__APPLE__) .text .globl _aesni_cbc_sha1_enc .private_extern _aesni_cbc_sha1_enc .p2align 5 _aesni_cbc_sha1_enc: movl _OPENSSL_ia32cap_P+0(%rip),%r10d movq _OPENSSL_ia32cap_P+4(%rip),%r11 btq $61,%r11 jc aesni_cbc_sha1_enc_shaext andl $268435456,%r11d andl $1073741824,%r10d orl %r11d,%r10d cmpl $1342177280,%r10d je aesni_cbc_sha1_enc_avx jmp aesni_cbc_sha1_enc_ssse3 .byte 0xf3,0xc3 .p2align 5 aesni_cbc_sha1_enc_ssse3: movq 8(%rsp),%r10 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 leaq -104(%rsp),%rsp movq %rdi,%r12 movq %rsi,%r13 movq %rdx,%r14 leaq 112(%rcx),%r15 movdqu (%r8),%xmm2 movq %r8,88(%rsp) shlq $6,%r14 subq %r12,%r13 movl 240-112(%r15),%r8d addq %r10,%r14 leaq K_XX_XX(%rip),%r11 movl 0(%r9),%eax movl 4(%r9),%ebx movl 8(%r9),%ecx movl 12(%r9),%edx movl %ebx,%esi movl 16(%r9),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi movdqa 64(%r11),%xmm3 movdqa 0(%r11),%xmm13 movdqu 0(%r10),%xmm4 movdqu 16(%r10),%xmm5 movdqu 32(%r10),%xmm6 movdqu 48(%r10),%xmm7 .byte 102,15,56,0,227 .byte 102,15,56,0,235 .byte 102,15,56,0,243 addq $64,%r10 paddd %xmm13,%xmm4 .byte 102,15,56,0,251 paddd %xmm13,%xmm5 paddd %xmm13,%xmm6 movdqa %xmm4,0(%rsp) psubd %xmm13,%xmm4 movdqa %xmm5,16(%rsp) psubd %xmm13,%xmm5 movdqa %xmm6,32(%rsp) psubd %xmm13,%xmm6 movups -112(%r15),%xmm15 movups 16-112(%r15),%xmm0 jmp L$oop_ssse3 .p2align 5 L$oop_ssse3: rorl $2,%ebx movups 0(%r12),%xmm14 xorps %xmm15,%xmm14 xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 pshufd $238,%xmm4,%xmm8 xorl %edx,%esi movdqa %xmm7,%xmm12 paddd %xmm7,%xmm13 movl %eax,%edi addl 0(%rsp),%ebp punpcklqdq %xmm5,%xmm8 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp psrldq $4,%xmm12 andl %ebx,%edi xorl %ecx,%ebx pxor %xmm4,%xmm8 addl %eax,%ebp rorl $7,%eax pxor %xmm6,%xmm12 xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx pxor %xmm12,%xmm8 xorl %ebx,%eax roll $5,%ebp movdqa %xmm13,48(%rsp) addl %edi,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 andl %eax,%esi movdqa %xmm8,%xmm3 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp movdqa %xmm8,%xmm12 xorl %ebx,%esi pslldq $12,%xmm3 paddd %xmm8,%xmm8 movl %edx,%edi addl 8(%rsp),%ecx psrld $31,%xmm12 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx movdqa %xmm3,%xmm13 andl %ebp,%edi xorl %eax,%ebp psrld $30,%xmm3 addl %edx,%ecx rorl $7,%edx por %xmm12,%xmm8 xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx movups -48(%r15),%xmm1 .byte 102,15,56,220,208 pslld $2,%xmm13 pxor %xmm3,%xmm8 xorl %ebp,%edx movdqa 0(%r11),%xmm3 roll $5,%ecx addl %edi,%ebx andl %edx,%esi pxor %xmm13,%xmm8 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx pshufd $238,%xmm5,%xmm9 xorl %ebp,%esi movdqa %xmm8,%xmm13 paddd %xmm8,%xmm3 movl %ebx,%edi addl 16(%rsp),%eax punpcklqdq %xmm6,%xmm9 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax psrldq $4,%xmm13 andl %ecx,%edi xorl %edx,%ecx pxor %xmm5,%xmm9 addl %ebx,%eax rorl $7,%ebx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 pxor %xmm7,%xmm13 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp pxor %xmm13,%xmm9 xorl %ecx,%ebx roll $5,%eax movdqa %xmm3,0(%rsp) addl %edi,%ebp andl %ebx,%esi movdqa %xmm9,%xmm12 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax movdqa %xmm9,%xmm13 xorl %ecx,%esi pslldq $12,%xmm12 paddd %xmm9,%xmm9 movl %ebp,%edi addl 24(%rsp),%edx psrld $31,%xmm13 xorl %ebx,%eax roll $5,%ebp addl %esi,%edx movups -16(%r15),%xmm1 .byte 102,15,56,220,208 movdqa 
%xmm12,%xmm3 andl %eax,%edi xorl %ebx,%eax psrld $30,%xmm12 addl %ebp,%edx rorl $7,%ebp por %xmm13,%xmm9 xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx pslld $2,%xmm3 pxor %xmm12,%xmm9 xorl %eax,%ebp movdqa 16(%r11),%xmm12 roll $5,%edx addl %edi,%ecx andl %ebp,%esi pxor %xmm3,%xmm9 xorl %eax,%ebp addl %edx,%ecx rorl $7,%edx pshufd $238,%xmm6,%xmm10 xorl %eax,%esi movdqa %xmm9,%xmm3 paddd %xmm9,%xmm12 movl %ecx,%edi addl 32(%rsp),%ebx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 punpcklqdq %xmm7,%xmm10 xorl %ebp,%edx roll $5,%ecx addl %esi,%ebx psrldq $4,%xmm3 andl %edx,%edi xorl %ebp,%edx pxor %xmm6,%xmm10 addl %ecx,%ebx rorl $7,%ecx pxor %xmm8,%xmm3 xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax pxor %xmm3,%xmm10 xorl %edx,%ecx roll $5,%ebx movdqa %xmm12,16(%rsp) addl %edi,%eax andl %ecx,%esi movdqa %xmm10,%xmm13 xorl %edx,%ecx addl %ebx,%eax rorl $7,%ebx movups 16(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm10,%xmm3 xorl %edx,%esi pslldq $12,%xmm13 paddd %xmm10,%xmm10 movl %eax,%edi addl 40(%rsp),%ebp psrld $31,%xmm3 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp movdqa %xmm13,%xmm12 andl %ebx,%edi xorl %ecx,%ebx psrld $30,%xmm13 addl %eax,%ebp rorl $7,%eax por %xmm3,%xmm10 xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx pslld $2,%xmm12 pxor %xmm13,%xmm10 xorl %ebx,%eax movdqa 16(%r11),%xmm13 roll $5,%ebp addl %edi,%edx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 andl %eax,%esi pxor %xmm12,%xmm10 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp pshufd $238,%xmm7,%xmm11 xorl %ebx,%esi movdqa %xmm10,%xmm12 paddd %xmm10,%xmm13 movl %edx,%edi addl 48(%rsp),%ecx punpcklqdq %xmm8,%xmm11 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx psrldq $4,%xmm12 andl %ebp,%edi xorl %eax,%ebp pxor %xmm7,%xmm11 addl %edx,%ecx rorl $7,%edx pxor %xmm9,%xmm12 xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx movups 48(%r15),%xmm1 .byte 102,15,56,220,208 pxor %xmm12,%xmm11 xorl %ebp,%edx roll $5,%ecx movdqa %xmm13,32(%rsp) addl %edi,%ebx andl %edx,%esi movdqa %xmm11,%xmm3 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx movdqa %xmm11,%xmm12 xorl %ebp,%esi pslldq $12,%xmm3 paddd %xmm11,%xmm11 movl %ebx,%edi addl 56(%rsp),%eax psrld $31,%xmm12 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax movdqa %xmm3,%xmm13 andl %ecx,%edi xorl %edx,%ecx psrld $30,%xmm3 addl %ebx,%eax rorl $7,%ebx cmpl $11,%r8d jb L$aesenclast1 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je L$aesenclast1 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 L$aesenclast1: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 por %xmm12,%xmm11 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp pslld $2,%xmm13 pxor %xmm3,%xmm11 xorl %ecx,%ebx movdqa 16(%r11),%xmm3 roll $5,%eax addl %edi,%ebp andl %ebx,%esi pxor %xmm13,%xmm11 pshufd $238,%xmm10,%xmm13 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax pxor %xmm8,%xmm4 xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx punpcklqdq %xmm11,%xmm13 xorl %ebx,%eax roll $5,%ebp pxor %xmm5,%xmm4 addl %esi,%edx movups 16(%r12),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,0(%r12,%r13,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 andl %eax,%edi movdqa %xmm3,%xmm12 xorl %ebx,%eax paddd %xmm11,%xmm3 addl %ebp,%edx pxor %xmm13,%xmm4 rorl $7,%ebp xorl %ebx,%edi movl %edx,%esi addl 4(%rsp),%ecx movdqa %xmm4,%xmm13 xorl %eax,%ebp roll $5,%edx movdqa %xmm3,48(%rsp) addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp pslld $2,%xmm4 addl %edx,%ecx rorl $7,%edx psrld $30,%xmm13 xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx movups -64(%r15),%xmm0 .byte 
102,15,56,220,209 por %xmm13,%xmm4 xorl %ebp,%edx roll $5,%ecx pshufd $238,%xmm11,%xmm3 addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax pxor %xmm9,%xmm5 addl 16(%rsp),%ebp movups -48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi punpcklqdq %xmm4,%xmm3 movl %eax,%edi roll $5,%eax pxor %xmm6,%xmm5 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm12,%xmm13 rorl $7,%ebx paddd %xmm4,%xmm12 addl %eax,%ebp pxor %xmm3,%xmm5 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm5,%xmm3 addl %edi,%edx xorl %ebx,%esi movdqa %xmm12,0(%rsp) rorl $7,%eax addl %ebp,%edx addl 24(%rsp),%ecx pslld $2,%xmm5 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm3 roll $5,%edx addl %esi,%ecx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp por %xmm3,%xmm5 addl %edx,%ecx addl 28(%rsp),%ebx pshufd $238,%xmm4,%xmm12 xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx pxor %xmm10,%xmm6 addl 32(%rsp),%eax xorl %edx,%esi punpcklqdq %xmm5,%xmm12 movl %ebx,%edi roll $5,%ebx pxor %xmm7,%xmm6 addl %esi,%eax xorl %edx,%edi movdqa 32(%r11),%xmm3 rorl $7,%ecx paddd %xmm5,%xmm13 addl %ebx,%eax pxor %xmm12,%xmm6 addl 36(%rsp),%ebp movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax movdqa %xmm6,%xmm12 addl %edi,%ebp xorl %ecx,%esi movdqa %xmm13,16(%rsp) rorl $7,%ebx addl %eax,%ebp addl 40(%rsp),%edx pslld $2,%xmm6 xorl %ebx,%esi movl %ebp,%edi psrld $30,%xmm12 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax por %xmm12,%xmm6 addl %ebp,%edx addl 44(%rsp),%ecx pshufd $238,%xmm5,%xmm13 xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx pxor %xmm11,%xmm7 addl 48(%rsp),%ebx xorl %ebp,%esi punpcklqdq %xmm6,%xmm13 movl %ecx,%edi roll $5,%ecx pxor %xmm8,%xmm7 addl %esi,%ebx xorl %ebp,%edi movdqa %xmm3,%xmm12 rorl $7,%edx paddd %xmm6,%xmm3 addl %ecx,%ebx pxor %xmm13,%xmm7 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx movdqa %xmm7,%xmm13 addl %edi,%eax xorl %edx,%esi movdqa %xmm3,32(%rsp) rorl $7,%ecx addl %ebx,%eax addl 56(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 pslld $2,%xmm7 xorl %ecx,%esi movl %eax,%edi psrld $30,%xmm13 roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx por %xmm13,%xmm7 addl %eax,%ebp addl 60(%rsp),%edx pshufd $238,%xmm6,%xmm3 xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx pxor %xmm4,%xmm8 addl 0(%rsp),%ecx xorl %eax,%esi punpcklqdq %xmm7,%xmm3 movl %edx,%edi roll $5,%edx pxor %xmm9,%xmm8 addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi movdqa %xmm12,%xmm13 rorl $7,%ebp paddd %xmm7,%xmm12 addl %edx,%ecx pxor %xmm3,%xmm8 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx movdqa %xmm8,%xmm3 addl %edi,%ebx xorl %ebp,%esi movdqa %xmm12,48(%rsp) rorl $7,%edx addl %ecx,%ebx addl 8(%rsp),%eax pslld $2,%xmm8 xorl %edx,%esi movl %ebx,%edi psrld $30,%xmm3 roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx por %xmm3,%xmm8 addl %ebx,%eax addl 12(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 pshufd $238,%xmm7,%xmm12 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp pxor %xmm5,%xmm9 addl 16(%rsp),%edx xorl %ebx,%esi punpcklqdq %xmm8,%xmm12 movl %ebp,%edi roll $5,%ebp pxor %xmm10,%xmm9 addl 
%esi,%edx xorl %ebx,%edi movdqa %xmm13,%xmm3 rorl $7,%eax paddd %xmm8,%xmm13 addl %ebp,%edx pxor %xmm12,%xmm9 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx movdqa %xmm9,%xmm12 addl %edi,%ecx cmpl $11,%r8d jb L$aesenclast2 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je L$aesenclast2 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 L$aesenclast2: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi movdqa %xmm13,0(%rsp) rorl $7,%ebp addl %edx,%ecx addl 24(%rsp),%ebx pslld $2,%xmm9 xorl %ebp,%esi movl %ecx,%edi psrld $30,%xmm12 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx por %xmm12,%xmm9 addl %ecx,%ebx addl 28(%rsp),%eax pshufd $238,%xmm8,%xmm13 rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax pxor %xmm6,%xmm10 addl 32(%rsp),%ebp movups 32(%r12),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,16(%r13,%r12,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx punpcklqdq %xmm9,%xmm13 movl %eax,%edi xorl %ecx,%esi pxor %xmm11,%xmm10 roll $5,%eax addl %esi,%ebp movdqa %xmm3,%xmm12 xorl %ebx,%edi paddd %xmm9,%xmm3 xorl %ecx,%ebx pxor %xmm13,%xmm10 addl %eax,%ebp addl 36(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movdqa %xmm10,%xmm13 movl %ebp,%esi xorl %ebx,%edi movdqa %xmm3,16(%rsp) roll $5,%ebp addl %edi,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi pslld $2,%xmm10 xorl %ebx,%eax addl %ebp,%edx psrld $30,%xmm13 addl 40(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax por %xmm13,%xmm10 rorl $7,%ebp movl %edx,%edi xorl %eax,%esi roll $5,%edx pshufd $238,%xmm9,%xmm3 addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movups -48(%r15),%xmm1 .byte 102,15,56,220,208 movl %ecx,%esi xorl %ebp,%edi roll $5,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx pxor %xmm7,%xmm11 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx rorl $7,%ecx punpcklqdq %xmm10,%xmm3 movl %ebx,%edi xorl %edx,%esi pxor %xmm4,%xmm11 roll $5,%ebx addl %esi,%eax movdqa 48(%r11),%xmm13 xorl %ecx,%edi paddd %xmm10,%xmm12 xorl %edx,%ecx pxor %xmm3,%xmm11 addl %ebx,%eax addl 52(%rsp),%ebp movups -32(%r15),%xmm0 .byte 102,15,56,220,209 andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movdqa %xmm11,%xmm3 movl %eax,%esi xorl %ecx,%edi movdqa %xmm12,32(%rsp) roll $5,%eax addl %edi,%ebp xorl %ebx,%esi pslld $2,%xmm11 xorl %ecx,%ebx addl %eax,%ebp psrld $30,%xmm3 addl 56(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx por %xmm3,%xmm11 rorl $7,%eax movl %ebp,%edi xorl %ebx,%esi roll $5,%ebp pshufd $238,%xmm10,%xmm12 addl %esi,%edx movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movl %edx,%esi xorl %eax,%edi roll $5,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx pxor %xmm8,%xmm4 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp rorl $7,%edx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 punpcklqdq %xmm11,%xmm12 movl %ecx,%edi xorl %ebp,%esi pxor %xmm5,%xmm4 roll $5,%ecx addl %esi,%ebx movdqa %xmm13,%xmm3 xorl %edx,%edi paddd %xmm11,%xmm13 xorl %ebp,%edx pxor %xmm12,%xmm4 addl %ecx,%ebx addl 4(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movdqa %xmm4,%xmm12 movl %ebx,%esi xorl %edx,%edi movdqa %xmm13,48(%rsp) roll $5,%ebx addl %edi,%eax xorl %ecx,%esi pslld $2,%xmm4 xorl %edx,%ecx 
addl %ebx,%eax psrld $30,%xmm12 addl 8(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 andl %ecx,%esi xorl %edx,%ecx por %xmm12,%xmm4 rorl $7,%ebx movl %eax,%edi xorl %ecx,%esi roll $5,%eax pshufd $238,%xmm11,%xmm13 addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movl %ebp,%esi xorl %ebx,%edi roll $5,%ebp addl %edi,%edx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx pxor %xmm9,%xmm5 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax rorl $7,%ebp punpcklqdq %xmm4,%xmm13 movl %edx,%edi xorl %eax,%esi pxor %xmm6,%xmm5 roll $5,%edx addl %esi,%ecx movdqa %xmm3,%xmm12 xorl %ebp,%edi paddd %xmm4,%xmm3 xorl %eax,%ebp pxor %xmm13,%xmm5 addl %edx,%ecx addl 20(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movups 48(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm5,%xmm13 movl %ecx,%esi xorl %ebp,%edi movdqa %xmm3,0(%rsp) roll $5,%ecx addl %edi,%ebx xorl %edx,%esi pslld $2,%xmm5 xorl %ebp,%edx addl %ecx,%ebx psrld $30,%xmm13 addl 24(%rsp),%eax andl %edx,%esi xorl %ebp,%edx por %xmm13,%xmm5 rorl $7,%ecx movl %ebx,%edi xorl %edx,%esi roll $5,%ebx pshufd $238,%xmm4,%xmm3 addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp cmpl $11,%r8d jb L$aesenclast3 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je L$aesenclast3 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 L$aesenclast3: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%edi roll $5,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp pxor %xmm10,%xmm6 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax punpcklqdq %xmm5,%xmm3 movl %ebp,%edi xorl %ebx,%esi pxor %xmm7,%xmm6 roll $5,%ebp addl %esi,%edx movups 48(%r12),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,32(%r13,%r12,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm12,%xmm13 xorl %eax,%edi paddd %xmm5,%xmm12 xorl %ebx,%eax pxor %xmm3,%xmm6 addl %ebp,%edx addl 36(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movdqa %xmm6,%xmm3 movl %edx,%esi xorl %eax,%edi movdqa %xmm12,16(%rsp) roll $5,%edx addl %edi,%ecx xorl %ebp,%esi pslld $2,%xmm6 xorl %eax,%ebp addl %edx,%ecx psrld $30,%xmm3 addl 40(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp por %xmm3,%xmm6 rorl $7,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 movl %ecx,%edi xorl %ebp,%esi roll $5,%ecx pshufd $238,%xmm5,%xmm12 addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax pxor %xmm11,%xmm7 addl 48(%rsp),%ebp movups -48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi punpcklqdq %xmm6,%xmm12 movl %eax,%edi roll $5,%eax pxor %xmm8,%xmm7 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm13,%xmm3 rorl $7,%ebx paddd %xmm6,%xmm13 addl %eax,%ebp pxor %xmm12,%xmm7 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm7,%xmm12 addl %edi,%edx xorl %ebx,%esi movdqa %xmm13,32(%rsp) rorl $7,%eax addl %ebp,%edx addl 56(%rsp),%ecx pslld $2,%xmm7 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm12 roll $5,%edx addl %esi,%ecx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp por %xmm12,%xmm7 addl %edx,%ecx addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx 
addl %ecx,%ebx addl 0(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx paddd %xmm7,%xmm3 addl %esi,%eax xorl %edx,%edi movdqa %xmm3,48(%rsp) rorl $7,%ecx addl %ebx,%eax addl 4(%rsp),%ebp movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx cmpq %r14,%r10 je L$done_ssse3 movdqa 64(%r11),%xmm3 movdqa 0(%r11),%xmm13 movdqu 0(%r10),%xmm4 movdqu 16(%r10),%xmm5 movdqu 32(%r10),%xmm6 movdqu 48(%r10),%xmm7 .byte 102,15,56,0,227 addq $64,%r10 addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi .byte 102,15,56,0,235 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx paddd %xmm13,%xmm4 addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi movdqa %xmm4,0(%rsp) roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx psubd %xmm13,%xmm4 addl %ebx,%eax addl 24(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi .byte 102,15,56,0,243 roll $5,%edx addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp paddd %xmm13,%xmm5 addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi movdqa %xmm5,16(%rsp) roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx psubd %xmm13,%xmm5 addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi .byte 102,15,56,0,251 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax paddd %xmm13,%xmm6 addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi movdqa %xmm6,32(%rsp) roll $5,%edx addl %edi,%ecx cmpl $11,%r8d jb L$aesenclast4 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je L$aesenclast4 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 L$aesenclast4: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi rorl $7,%ebp psubd %xmm13,%xmm6 addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax movups %xmm2,48(%r13,%r12,1) leaq 64(%r12),%r12 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx addl 12(%r9),%edx movl %eax,0(%r9) addl 16(%r9),%ebp movl %esi,4(%r9) movl %esi,%ebx movl %ecx,8(%r9) movl %ecx,%edi movl %edx,12(%r9) xorl %edx,%edi movl %ebp,16(%r9) andl %edi,%esi jmp L$oop_ssse3 L$done_ssse3: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 24(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 xorl 
%ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi roll $5,%edx addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx cmpl $11,%r8d jb L$aesenclast5 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je L$aesenclast5 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 L$aesenclast5: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax movups %xmm2,48(%r13,%r12,1) movq 88(%rsp),%r8 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx movl %eax,0(%r9) addl 12(%r9),%edx movl %esi,4(%r9) addl 16(%r9),%ebp movl %ecx,8(%r9) movl %edx,12(%r9) movl %ebp,16(%r9) movups %xmm2,(%r8) leaq 104(%rsp),%rsi movq 0(%rsi),%r15 movq 8(%rsi),%r14 movq 16(%rsi),%r13 movq 24(%rsi),%r12 movq 32(%rsi),%rbp movq 40(%rsi),%rbx leaq 48(%rsi),%rsp L$epilogue_ssse3: .byte 0xf3,0xc3 .p2align 5 aesni_cbc_sha1_enc_avx: movq 8(%rsp),%r10 pushq %rbx pushq %rbp pushq %r12 pushq %r13 pushq %r14 pushq %r15 leaq -104(%rsp),%rsp vzeroall movq %rdi,%r12 movq %rsi,%r13 movq %rdx,%r14 leaq 112(%rcx),%r15 vmovdqu (%r8),%xmm12 movq %r8,88(%rsp) shlq $6,%r14 subq %r12,%r13 movl 240-112(%r15),%r8d addq %r10,%r14 leaq K_XX_XX(%rip),%r11 movl 0(%r9),%eax movl 4(%r9),%ebx movl 8(%r9),%ecx movl 12(%r9),%edx movl %ebx,%esi movl 16(%r9),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi vmovdqa 64(%r11),%xmm6 vmovdqa 0(%r11),%xmm10 vmovdqu 0(%r10),%xmm0 vmovdqu 16(%r10),%xmm1 vmovdqu 32(%r10),%xmm2 vmovdqu 48(%r10),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r10 vpshufb %xmm6,%xmm1,%xmm1 vpshufb %xmm6,%xmm2,%xmm2 vpshufb %xmm6,%xmm3,%xmm3 vpaddd %xmm10,%xmm0,%xmm4 vpaddd %xmm10,%xmm1,%xmm5 vpaddd %xmm10,%xmm2,%xmm6 vmovdqa %xmm4,0(%rsp) vmovdqa %xmm5,16(%rsp) vmovdqa %xmm6,32(%rsp) vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 jmp L$oop_avx .p2align 5 L$oop_avx: shrdl $2,%ebx,%ebx vmovdqu 0(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 xorl %edx,%esi vpalignr $8,%xmm0,%xmm1,%xmm4 movl %eax,%edi addl 0(%rsp),%ebp vpaddd %xmm3,%xmm10,%xmm9 xorl %ecx,%ebx shldl $5,%eax,%eax vpsrldq $4,%xmm3,%xmm8 addl %esi,%ebp andl %ebx,%edi vpxor %xmm0,%xmm4,%xmm4 xorl %ecx,%ebx addl %eax,%ebp vpxor %xmm2,%xmm8,%xmm8 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx vpxor %xmm8,%xmm4,%xmm4 xorl %ebx,%eax shldl $5,%ebp,%ebp vmovdqa %xmm9,48(%rsp) addl %edi,%edx vaesenc 
%xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 andl %eax,%esi vpsrld $31,%xmm4,%xmm8 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpslldq $12,%xmm4,%xmm9 vpaddd %xmm4,%xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpor %xmm8,%xmm4,%xmm4 vpsrld $30,%xmm9,%xmm8 addl %esi,%ecx andl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm4,%xmm4 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 vpxor %xmm9,%xmm4,%xmm4 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %edi,%ebx andl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpalignr $8,%xmm1,%xmm2,%xmm5 movl %ebx,%edi addl 16(%rsp),%eax vpaddd %xmm4,%xmm10,%xmm9 xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrldq $4,%xmm4,%xmm8 addl %esi,%eax andl %ecx,%edi vpxor %xmm1,%xmm5,%xmm5 xorl %edx,%ecx addl %ebx,%eax vpxor %xmm3,%xmm8,%xmm8 shrdl $7,%ebx,%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp vpxor %xmm8,%xmm5,%xmm5 xorl %ecx,%ebx shldl $5,%eax,%eax vmovdqa %xmm9,0(%rsp) addl %edi,%ebp andl %ebx,%esi vpsrld $31,%xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp shrdl $7,%eax,%eax xorl %ecx,%esi vpslldq $12,%xmm5,%xmm9 vpaddd %xmm5,%xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx xorl %ebx,%eax shldl $5,%ebp,%ebp vpor %xmm8,%xmm5,%xmm5 vpsrld $30,%xmm9,%xmm8 addl %esi,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 andl %eax,%edi xorl %ebx,%eax addl %ebp,%edx vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm5,%xmm5 shrdl $7,%ebp,%ebp xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx vpxor %xmm9,%xmm5,%xmm5 xorl %eax,%ebp shldl $5,%edx,%edx vmovdqa 16(%r11),%xmm10 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi vpalignr $8,%xmm2,%xmm3,%xmm6 movl %ecx,%edi addl 32(%rsp),%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 vpaddd %xmm5,%xmm10,%xmm9 xorl %ebp,%edx shldl $5,%ecx,%ecx vpsrldq $4,%xmm5,%xmm8 addl %esi,%ebx andl %edx,%edi vpxor %xmm2,%xmm6,%xmm6 xorl %ebp,%edx addl %ecx,%ebx vpxor %xmm4,%xmm8,%xmm8 shrdl $7,%ecx,%ecx xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax vpxor %xmm8,%xmm6,%xmm6 xorl %edx,%ecx shldl $5,%ebx,%ebx vmovdqa %xmm9,16(%rsp) addl %edi,%eax andl %ecx,%esi vpsrld $31,%xmm6,%xmm8 xorl %edx,%ecx addl %ebx,%eax shrdl $7,%ebx,%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %edx,%esi vpslldq $12,%xmm6,%xmm9 vpaddd %xmm6,%xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp xorl %ecx,%ebx shldl $5,%eax,%eax vpor %xmm8,%xmm6,%xmm6 vpsrld $30,%xmm9,%xmm8 addl %esi,%ebp andl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm6,%xmm6 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx vpxor %xmm9,%xmm6,%xmm6 xorl %ebx,%eax shldl $5,%ebp,%ebp addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 andl %eax,%esi xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpalignr $8,%xmm3,%xmm4,%xmm7 movl %edx,%edi addl 48(%rsp),%ecx vpaddd %xmm6,%xmm10,%xmm9 xorl %eax,%ebp shldl $5,%edx,%edx vpsrldq $4,%xmm6,%xmm8 addl %esi,%ecx andl %ebp,%edi vpxor %xmm3,%xmm7,%xmm7 xorl %eax,%ebp addl %edx,%ecx vpxor %xmm5,%xmm8,%xmm8 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 vpxor %xmm8,%xmm7,%xmm7 xorl %ebp,%edx shldl $5,%ecx,%ecx vmovdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi vpsrld $31,%xmm7,%xmm8 xorl %ebp,%edx addl %ecx,%ebx shrdl 
$7,%ecx,%ecx xorl %ebp,%esi vpslldq $12,%xmm7,%xmm9 vpaddd %xmm7,%xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax xorl %edx,%ecx shldl $5,%ebx,%ebx vpor %xmm8,%xmm7,%xmm7 vpsrld $30,%xmm9,%xmm8 addl %esi,%eax andl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm7,%xmm7 shrdl $7,%ebx,%ebx cmpl $11,%r8d jb L$vaesenclast6 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je L$vaesenclast6 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 L$vaesenclast6: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp vpxor %xmm9,%xmm7,%xmm7 xorl %ecx,%ebx shldl $5,%eax,%eax addl %edi,%ebp andl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx vpxor %xmm1,%xmm0,%xmm0 xorl %ebx,%eax shldl $5,%ebp,%ebp vpaddd %xmm7,%xmm10,%xmm9 addl %esi,%edx vmovdqu 16(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,0(%r12,%r13,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 andl %eax,%edi vpxor %xmm8,%xmm0,%xmm0 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%edi vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) movl %edx,%esi addl 4(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpslld $2,%xmm0,%xmm0 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 vpor %xmm8,%xmm0,%xmm0 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm2,%xmm1,%xmm1 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm0,%xmm10,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm1,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm1,%xmm1 addl 24(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm1,%xmm1 addl 28(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx vpxor %xmm3,%xmm2,%xmm2 addl %esi,%eax xorl %edx,%edi vpaddd %xmm1,%xmm10,%xmm9 vmovdqa 32(%r11),%xmm10 shrdl $7,%ecx,%ecx addl %ebx,%eax vpxor %xmm8,%xmm2,%xmm2 addl 36(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpslld $2,%xmm2,%xmm2 addl 40(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vpor %xmm8,%xmm2,%xmm2 addl 44(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx vaesenc 
%xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebx xorl %ebp,%edi vpaddd %xmm2,%xmm10,%xmm9 shrdl $7,%edx,%edx addl %ecx,%ebx vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpalignr $8,%xmm2,%xmm3,%xmm8 vpxor %xmm0,%xmm4,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx vpxor %xmm5,%xmm4,%xmm4 addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi vpaddd %xmm3,%xmm10,%xmm9 shrdl $7,%ebp,%ebp addl %edx,%ecx vpxor %xmm8,%xmm4,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx vpsrld $30,%xmm4,%xmm8 vmovdqa %xmm9,48(%rsp) addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpslld $2,%xmm4,%xmm4 addl 8(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax vpor %xmm8,%xmm4,%xmm4 addl 12(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpalignr $8,%xmm3,%xmm4,%xmm8 vpxor %xmm1,%xmm5,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp vpxor %xmm6,%xmm5,%xmm5 addl %esi,%edx xorl %ebx,%edi vpaddd %xmm4,%xmm10,%xmm9 shrdl $7,%eax,%eax addl %ebp,%edx vpxor %xmm8,%xmm5,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx vpsrld $30,%xmm5,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%ecx cmpl $11,%r8d jb L$vaesenclast7 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je L$vaesenclast7 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 L$vaesenclast7: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpslld $2,%xmm5,%xmm5 addl 24(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vpor %xmm8,%xmm5,%xmm5 addl 28(%rsp),%eax shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax vpalignr $8,%xmm4,%xmm5,%xmm8 vpxor %xmm2,%xmm6,%xmm6 addl 32(%rsp),%ebp vmovdqu 32(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,16(%r13,%r12,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 andl %ecx,%esi xorl %edx,%ecx shrdl $7,%ebx,%ebx vpxor %xmm7,%xmm6,%xmm6 movl %eax,%edi xorl %ecx,%esi vpaddd %xmm5,%xmm10,%xmm9 shldl $5,%eax,%eax addl %esi,%ebp vpxor %xmm8,%xmm6,%xmm6 xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 36(%rsp),%edx vpsrld $30,%xmm6,%xmm8 vmovdqa %xmm9,16(%rsp) andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi vpslld $2,%xmm6,%xmm6 xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx vaesenc 
%xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx addl 40(%rsp),%ecx andl %eax,%esi vpor %xmm8,%xmm6,%xmm6 xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%edi xorl %eax,%esi shldl $5,%edx,%edx addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 movl %ecx,%esi xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx vpalignr $8,%xmm5,%xmm6,%xmm8 vpxor %xmm3,%xmm7,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx shrdl $7,%ecx,%ecx vpxor %xmm0,%xmm7,%xmm7 movl %ebx,%edi xorl %edx,%esi vpaddd %xmm6,%xmm10,%xmm9 vmovdqa 48(%r11),%xmm10 shldl $5,%ebx,%ebx addl %esi,%eax vpxor %xmm8,%xmm7,%xmm7 xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 52(%rsp),%ebp vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 vpsrld $30,%xmm7,%xmm8 vmovdqa %xmm9,32(%rsp) andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi vpslld $2,%xmm7,%xmm7 xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp addl 56(%rsp),%edx andl %ebx,%esi vpor %xmm8,%xmm7,%xmm7 xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%edi xorl %ebx,%esi shldl $5,%ebp,%ebp addl %esi,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 vpxor %xmm1,%xmm0,%xmm0 movl %ecx,%edi xorl %ebp,%esi vpaddd %xmm7,%xmm10,%xmm9 shldl $5,%ecx,%ecx addl %esi,%ebx vpxor %xmm8,%xmm0,%xmm0 xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 4(%rsp),%eax vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi vpslld $2,%xmm0,%xmm0 xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 8(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 andl %ecx,%esi vpor %xmm8,%xmm0,%xmm0 xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%edi xorl %ecx,%esi shldl $5,%eax,%eax addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax shrdl $7,%ebp,%ebp vpxor %xmm2,%xmm1,%xmm1 movl %edx,%edi xorl %eax,%esi vpaddd %xmm0,%xmm10,%xmm9 shldl $5,%edx,%edx addl %esi,%ecx vpxor %xmm8,%xmm1,%xmm1 xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 20(%rsp),%ebx vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 movl %ecx,%esi vpslld $2,%xmm1,%xmm1 xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx addl 24(%rsp),%eax andl %edx,%esi vpor %xmm8,%xmm1,%xmm1 xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%edi xorl %edx,%esi shldl $5,%ebx,%ebx addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp cmpl $11,%r8d jb L$vaesenclast8 vaesenc %xmm15,%xmm12,%xmm12 vmovups 
64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je L$vaesenclast8 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 L$vaesenclast8: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx shrdl $7,%eax,%eax vpxor %xmm3,%xmm2,%xmm2 movl %ebp,%edi xorl %ebx,%esi vpaddd %xmm1,%xmm10,%xmm9 shldl $5,%ebp,%ebp addl %esi,%edx vmovdqu 48(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,32(%r13,%r12,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 vpxor %xmm8,%xmm2,%xmm2 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 36(%rsp),%ecx vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi vpslld $2,%xmm2,%xmm2 xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx addl 40(%rsp),%ebx andl %ebp,%esi vpor %xmm8,%xmm2,%xmm2 xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 movl %ecx,%edi xorl %ebp,%esi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm2,%xmm10,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 0(%rsp),%eax vpaddd %xmm3,%xmm10,%xmm9 xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax vmovdqa %xmm9,48(%rsp) xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 4(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx cmpq %r14,%r10 je L$done_avx vmovdqa 64(%r11),%xmm9 vmovdqa 0(%r11),%xmm10 vmovdqu 0(%r10),%xmm0 vmovdqu 16(%r10),%xmm1 vmovdqu 32(%r10),%xmm2 vmovdqu 48(%r10),%xmm3 vpshufb %xmm9,%xmm0,%xmm0 addq $64,%r10 addl 16(%rsp),%ebx xorl %ebp,%esi vpshufb %xmm9,%xmm1,%xmm1 movl %ecx,%edi shldl $5,%ecx,%ecx vpaddd %xmm10,%xmm0,%xmm8 addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vmovdqa %xmm8,0(%rsp) addl 20(%rsp),%eax xorl 
%edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi vpshufb %xmm9,%xmm2,%xmm2 movl %edx,%edi shldl $5,%edx,%edx vpaddd %xmm10,%xmm1,%xmm8 addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vmovdqa %xmm8,16(%rsp) addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi vpshufb %xmm9,%xmm3,%xmm3 movl %ebp,%edi shldl $5,%ebp,%ebp vpaddd %xmm10,%xmm2,%xmm8 addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vmovdqa %xmm8,32(%rsp) addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx cmpl $11,%r8d jb L$vaesenclast9 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je L$vaesenclast9 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 L$vaesenclast9: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vmovups %xmm12,48(%r13,%r12,1) leaq 64(%r12),%r12 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx addl 12(%r9),%edx movl %eax,0(%r9) addl 16(%r9),%ebp movl %esi,4(%r9) movl %esi,%ebx movl %ecx,8(%r9) movl %ecx,%edi movl %edx,12(%r9) xorl %edx,%edi movl %ebp,16(%r9) andl %edi,%esi jmp L$oop_avx L$done_avx: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl 
%edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx cmpl $11,%r8d jb L$vaesenclast10 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je L$vaesenclast10 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 L$vaesenclast10: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vmovups %xmm12,48(%r13,%r12,1) movq 88(%rsp),%r8 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx movl %eax,0(%r9) addl 12(%r9),%edx movl %esi,4(%r9) addl 16(%r9),%ebp movl %ecx,8(%r9) movl %edx,12(%r9) movl %ebp,16(%r9) vmovups %xmm12,(%r8) vzeroall leaq 104(%rsp),%rsi movq 0(%rsi),%r15 movq 8(%rsi),%r14 movq 16(%rsi),%r13 movq 24(%rsi),%r12 movq 32(%rsi),%rbp movq 40(%rsi),%rbx leaq 48(%rsi),%rsp L$epilogue_avx: .byte 0xf3,0xc3 .section __DATA,__const .p2align 6 K_XX_XX: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .p2align 6 .p2align 5 aesni_cbc_sha1_enc_shaext: movq 8(%rsp),%r10 movdqu (%r9),%xmm8 movd 16(%r9),%xmm9 movdqa K_XX_XX+80(%rip),%xmm7 movl 240(%rcx),%r11d subq %rdi,%rsi movups (%rcx),%xmm15 movups (%r8),%xmm2 movups 16(%rcx),%xmm0 leaq 112(%rcx),%rcx pshufd $27,%xmm8,%xmm8 pshufd $27,%xmm9,%xmm9 jmp L$oop_shaext .p2align 4 L$oop_shaext: movups 0(%rdi),%xmm14 xorps %xmm15,%xmm14 xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 movdqu (%r10),%xmm3 movdqa %xmm9,%xmm12 .byte 102,15,56,0,223 movdqu 16(%r10),%xmm4 movdqa %xmm8,%xmm11 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 102,15,56,0,231 paddd %xmm3,%xmm9 movdqu 32(%r10),%xmm5 leaq 64(%r10),%r10 pxor %xmm12,%xmm3 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 pxor %xmm12,%xmm3 movdqa %xmm8,%xmm10 .byte 102,15,56,0,239 .byte 69,15,58,204,193,0 .byte 68,15,56,200,212 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 15,56,201,220 movdqu -16(%r10),%xmm6 movdqa %xmm8,%xmm9 .byte 102,15,56,0,247 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 69,15,58,204,194,0 .byte 68,15,56,200,205 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,0 .byte 68,15,56,200,214 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,0 .byte 68,15,56,200,203 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 cmpl $11,%r11d jb 
L$aesenclast11 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je L$aesenclast11 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 L$aesenclast11: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,0 .byte 68,15,56,200,212 movups 16(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,0(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,205 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,1 .byte 68,15,56,200,214 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,203 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,1 .byte 68,15,56,200,212 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 cmpl $11,%r11d jb L$aesenclast12 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je L$aesenclast12 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 L$aesenclast12: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,205 movups 32(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,16(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,214 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,2 .byte 68,15,56,200,203 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,212 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,2 .byte 68,15,56,200,205 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 cmpl $11,%r11d jb L$aesenclast13 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je L$aesenclast13 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 L$aesenclast13: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,214 movups 48(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,32(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,203 movups -48(%rcx),%xmm1 
.byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,3 .byte 68,15,56,200,212 .byte 15,56,202,236 pxor %xmm4,%xmm6 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,205 .byte 15,56,202,245 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm12,%xmm5 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,3 .byte 68,15,56,200,214 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,205 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 cmpl $11,%r11d jb L$aesenclast14 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je L$aesenclast14 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 L$aesenclast14: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 decq %rdx paddd %xmm11,%xmm8 movups %xmm2,48(%rsi,%rdi,1) leaq 64(%rdi),%rdi jnz L$oop_shaext pshufd $27,%xmm8,%xmm8 pshufd $27,%xmm9,%xmm9 movups %xmm2,(%r8) movdqu %xmm8,(%r9) movd %xmm9,16(%r9) .byte 0xf3,0xc3 #endif
marvin-hansen/iggy-streaming-system
10,961
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/test/trampoline-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .type abi_test_trampoline, @function .globl abi_test_trampoline .hidden abi_test_trampoline .align 16 abi_test_trampoline: .cfi_startproc _CET_ENDBR subq $120,%rsp .cfi_adjust_cfa_offset 120 movq %r8,48(%rsp) movq %rbx,64(%rsp) .cfi_offset rbx, -64 movq %rbp,72(%rsp) .cfi_offset rbp, -56 movq %r12,80(%rsp) .cfi_offset r12, -48 movq %r13,88(%rsp) .cfi_offset r13, -40 movq %r14,96(%rsp) .cfi_offset r14, -32 movq %r15,104(%rsp) .cfi_offset r15, -24 movq 0(%rsi),%rbx movq 8(%rsi),%rbp movq 16(%rsi),%r12 movq 24(%rsi),%r13 movq 32(%rsi),%r14 movq 40(%rsi),%r15 movq %rdi,32(%rsp) movq %rsi,40(%rsp) movq %rdx,%r10 movq %rcx,%r11 decq %r11 js .Largs_done movq (%r10),%rdi addq $8,%r10 decq %r11 js .Largs_done movq (%r10),%rsi addq $8,%r10 decq %r11 js .Largs_done movq (%r10),%rdx addq $8,%r10 decq %r11 js .Largs_done movq (%r10),%rcx addq $8,%r10 decq %r11 js .Largs_done movq (%r10),%r8 addq $8,%r10 decq %r11 js .Largs_done movq (%r10),%r9 addq $8,%r10 leaq 0(%rsp),%rax .Largs_loop: decq %r11 js .Largs_done movq %r11,56(%rsp) movq (%r10),%r11 movq %r11,(%rax) movq 56(%rsp),%r11 addq $8,%r10 addq $8,%rax jmp .Largs_loop .Largs_done: movq 32(%rsp),%rax movq 48(%rsp),%r10 testq %r10,%r10 jz .Lno_unwind pushfq orq $0x100,0(%rsp) popfq nop .globl abi_test_unwind_start .hidden abi_test_unwind_start abi_test_unwind_start: call *%rax .globl abi_test_unwind_return .hidden abi_test_unwind_return abi_test_unwind_return: pushfq andq $-0x101,0(%rsp) popfq .globl abi_test_unwind_stop .hidden abi_test_unwind_stop abi_test_unwind_stop: jmp .Lcall_done .Lno_unwind: call *%rax .Lcall_done: movq 40(%rsp),%rsi movq %rbx,0(%rsi) movq %rbp,8(%rsi) movq %r12,16(%rsi) movq %r13,24(%rsi) movq %r14,32(%rsi) movq %r15,40(%rsi) movq 64(%rsp),%rbx .cfi_restore rbx movq 72(%rsp),%rbp .cfi_restore rbp movq 80(%rsp),%r12 .cfi_restore r12 movq 88(%rsp),%r13 .cfi_restore r13 movq 96(%rsp),%r14 .cfi_restore r14 movq 104(%rsp),%r15 .cfi_restore r15 addq $120,%rsp .cfi_adjust_cfa_offset -120 .byte 0xf3,0xc3 .cfi_endproc .size abi_test_trampoline,.-abi_test_trampoline .type abi_test_clobber_rax, @function .globl abi_test_clobber_rax .hidden abi_test_clobber_rax .align 16 abi_test_clobber_rax: _CET_ENDBR xorq %rax,%rax .byte 0xf3,0xc3 .size abi_test_clobber_rax,.-abi_test_clobber_rax .type abi_test_clobber_rbx, @function .globl abi_test_clobber_rbx .hidden abi_test_clobber_rbx .align 16 abi_test_clobber_rbx: _CET_ENDBR xorq %rbx,%rbx .byte 0xf3,0xc3 .size abi_test_clobber_rbx,.-abi_test_clobber_rbx .type abi_test_clobber_rcx, @function .globl abi_test_clobber_rcx .hidden abi_test_clobber_rcx .align 16 abi_test_clobber_rcx: _CET_ENDBR xorq %rcx,%rcx .byte 0xf3,0xc3 .size abi_test_clobber_rcx,.-abi_test_clobber_rcx .type abi_test_clobber_rdx, @function .globl abi_test_clobber_rdx .hidden abi_test_clobber_rdx .align 16 abi_test_clobber_rdx: _CET_ENDBR xorq %rdx,%rdx .byte 0xf3,0xc3 .size abi_test_clobber_rdx,.-abi_test_clobber_rdx .type abi_test_clobber_rdi, @function .globl abi_test_clobber_rdi .hidden abi_test_clobber_rdi .align 16 abi_test_clobber_rdi: _CET_ENDBR xorq %rdi,%rdi .byte 0xf3,0xc3 .size abi_test_clobber_rdi,.-abi_test_clobber_rdi .type abi_test_clobber_rsi, @function .globl abi_test_clobber_rsi .hidden abi_test_clobber_rsi .align 16 abi_test_clobber_rsi: _CET_ENDBR xorq %rsi,%rsi .byte 0xf3,0xc3 .size 
abi_test_clobber_rsi,.-abi_test_clobber_rsi .type abi_test_clobber_rbp, @function .globl abi_test_clobber_rbp .hidden abi_test_clobber_rbp .align 16 abi_test_clobber_rbp: _CET_ENDBR xorq %rbp,%rbp .byte 0xf3,0xc3 .size abi_test_clobber_rbp,.-abi_test_clobber_rbp .type abi_test_clobber_r8, @function .globl abi_test_clobber_r8 .hidden abi_test_clobber_r8 .align 16 abi_test_clobber_r8: _CET_ENDBR xorq %r8,%r8 .byte 0xf3,0xc3 .size abi_test_clobber_r8,.-abi_test_clobber_r8 .type abi_test_clobber_r9, @function .globl abi_test_clobber_r9 .hidden abi_test_clobber_r9 .align 16 abi_test_clobber_r9: _CET_ENDBR xorq %r9,%r9 .byte 0xf3,0xc3 .size abi_test_clobber_r9,.-abi_test_clobber_r9 .type abi_test_clobber_r10, @function .globl abi_test_clobber_r10 .hidden abi_test_clobber_r10 .align 16 abi_test_clobber_r10: _CET_ENDBR xorq %r10,%r10 .byte 0xf3,0xc3 .size abi_test_clobber_r10,.-abi_test_clobber_r10 .type abi_test_clobber_r11, @function .globl abi_test_clobber_r11 .hidden abi_test_clobber_r11 .align 16 abi_test_clobber_r11: _CET_ENDBR xorq %r11,%r11 .byte 0xf3,0xc3 .size abi_test_clobber_r11,.-abi_test_clobber_r11 .type abi_test_clobber_r12, @function .globl abi_test_clobber_r12 .hidden abi_test_clobber_r12 .align 16 abi_test_clobber_r12: _CET_ENDBR xorq %r12,%r12 .byte 0xf3,0xc3 .size abi_test_clobber_r12,.-abi_test_clobber_r12 .type abi_test_clobber_r13, @function .globl abi_test_clobber_r13 .hidden abi_test_clobber_r13 .align 16 abi_test_clobber_r13: _CET_ENDBR xorq %r13,%r13 .byte 0xf3,0xc3 .size abi_test_clobber_r13,.-abi_test_clobber_r13 .type abi_test_clobber_r14, @function .globl abi_test_clobber_r14 .hidden abi_test_clobber_r14 .align 16 abi_test_clobber_r14: _CET_ENDBR xorq %r14,%r14 .byte 0xf3,0xc3 .size abi_test_clobber_r14,.-abi_test_clobber_r14 .type abi_test_clobber_r15, @function .globl abi_test_clobber_r15 .hidden abi_test_clobber_r15 .align 16 abi_test_clobber_r15: _CET_ENDBR xorq %r15,%r15 .byte 0xf3,0xc3 .size abi_test_clobber_r15,.-abi_test_clobber_r15 .type abi_test_clobber_xmm0, @function .globl abi_test_clobber_xmm0 .hidden abi_test_clobber_xmm0 .align 16 abi_test_clobber_xmm0: _CET_ENDBR pxor %xmm0,%xmm0 .byte 0xf3,0xc3 .size abi_test_clobber_xmm0,.-abi_test_clobber_xmm0 .type abi_test_clobber_xmm1, @function .globl abi_test_clobber_xmm1 .hidden abi_test_clobber_xmm1 .align 16 abi_test_clobber_xmm1: _CET_ENDBR pxor %xmm1,%xmm1 .byte 0xf3,0xc3 .size abi_test_clobber_xmm1,.-abi_test_clobber_xmm1 .type abi_test_clobber_xmm2, @function .globl abi_test_clobber_xmm2 .hidden abi_test_clobber_xmm2 .align 16 abi_test_clobber_xmm2: _CET_ENDBR pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .size abi_test_clobber_xmm2,.-abi_test_clobber_xmm2 .type abi_test_clobber_xmm3, @function .globl abi_test_clobber_xmm3 .hidden abi_test_clobber_xmm3 .align 16 abi_test_clobber_xmm3: _CET_ENDBR pxor %xmm3,%xmm3 .byte 0xf3,0xc3 .size abi_test_clobber_xmm3,.-abi_test_clobber_xmm3 .type abi_test_clobber_xmm4, @function .globl abi_test_clobber_xmm4 .hidden abi_test_clobber_xmm4 .align 16 abi_test_clobber_xmm4: _CET_ENDBR pxor %xmm4,%xmm4 .byte 0xf3,0xc3 .size abi_test_clobber_xmm4,.-abi_test_clobber_xmm4 .type abi_test_clobber_xmm5, @function .globl abi_test_clobber_xmm5 .hidden abi_test_clobber_xmm5 .align 16 abi_test_clobber_xmm5: _CET_ENDBR pxor %xmm5,%xmm5 .byte 0xf3,0xc3 .size abi_test_clobber_xmm5,.-abi_test_clobber_xmm5 .type abi_test_clobber_xmm6, @function .globl abi_test_clobber_xmm6 .hidden abi_test_clobber_xmm6 .align 16 abi_test_clobber_xmm6: _CET_ENDBR pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .size 
abi_test_clobber_xmm6,.-abi_test_clobber_xmm6 .type abi_test_clobber_xmm7, @function .globl abi_test_clobber_xmm7 .hidden abi_test_clobber_xmm7 .align 16 abi_test_clobber_xmm7: _CET_ENDBR pxor %xmm7,%xmm7 .byte 0xf3,0xc3 .size abi_test_clobber_xmm7,.-abi_test_clobber_xmm7 .type abi_test_clobber_xmm8, @function .globl abi_test_clobber_xmm8 .hidden abi_test_clobber_xmm8 .align 16 abi_test_clobber_xmm8: _CET_ENDBR pxor %xmm8,%xmm8 .byte 0xf3,0xc3 .size abi_test_clobber_xmm8,.-abi_test_clobber_xmm8 .type abi_test_clobber_xmm9, @function .globl abi_test_clobber_xmm9 .hidden abi_test_clobber_xmm9 .align 16 abi_test_clobber_xmm9: _CET_ENDBR pxor %xmm9,%xmm9 .byte 0xf3,0xc3 .size abi_test_clobber_xmm9,.-abi_test_clobber_xmm9 .type abi_test_clobber_xmm10, @function .globl abi_test_clobber_xmm10 .hidden abi_test_clobber_xmm10 .align 16 abi_test_clobber_xmm10: _CET_ENDBR pxor %xmm10,%xmm10 .byte 0xf3,0xc3 .size abi_test_clobber_xmm10,.-abi_test_clobber_xmm10 .type abi_test_clobber_xmm11, @function .globl abi_test_clobber_xmm11 .hidden abi_test_clobber_xmm11 .align 16 abi_test_clobber_xmm11: _CET_ENDBR pxor %xmm11,%xmm11 .byte 0xf3,0xc3 .size abi_test_clobber_xmm11,.-abi_test_clobber_xmm11 .type abi_test_clobber_xmm12, @function .globl abi_test_clobber_xmm12 .hidden abi_test_clobber_xmm12 .align 16 abi_test_clobber_xmm12: _CET_ENDBR pxor %xmm12,%xmm12 .byte 0xf3,0xc3 .size abi_test_clobber_xmm12,.-abi_test_clobber_xmm12 .type abi_test_clobber_xmm13, @function .globl abi_test_clobber_xmm13 .hidden abi_test_clobber_xmm13 .align 16 abi_test_clobber_xmm13: _CET_ENDBR pxor %xmm13,%xmm13 .byte 0xf3,0xc3 .size abi_test_clobber_xmm13,.-abi_test_clobber_xmm13 .type abi_test_clobber_xmm14, @function .globl abi_test_clobber_xmm14 .hidden abi_test_clobber_xmm14 .align 16 abi_test_clobber_xmm14: _CET_ENDBR pxor %xmm14,%xmm14 .byte 0xf3,0xc3 .size abi_test_clobber_xmm14,.-abi_test_clobber_xmm14 .type abi_test_clobber_xmm15, @function .globl abi_test_clobber_xmm15 .hidden abi_test_clobber_xmm15 .align 16 abi_test_clobber_xmm15: _CET_ENDBR pxor %xmm15,%xmm15 .byte 0xf3,0xc3 .size abi_test_clobber_xmm15,.-abi_test_clobber_xmm15 .type abi_test_bad_unwind_wrong_register, @function .globl abi_test_bad_unwind_wrong_register .hidden abi_test_bad_unwind_wrong_register .align 16 abi_test_bad_unwind_wrong_register: .cfi_startproc _CET_ENDBR pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-16 nop popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 .byte 0xf3,0xc3 .cfi_endproc .size abi_test_bad_unwind_wrong_register,.-abi_test_bad_unwind_wrong_register .type abi_test_bad_unwind_temporary, @function .globl abi_test_bad_unwind_temporary .hidden abi_test_bad_unwind_temporary .align 16 abi_test_bad_unwind_temporary: .cfi_startproc _CET_ENDBR pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-16 movq %r12,%rax incq %rax movq %rax,(%rsp) movq %r12,(%rsp) popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 .byte 0xf3,0xc3 .cfi_endproc .size abi_test_bad_unwind_temporary,.-abi_test_bad_unwind_temporary .type abi_test_set_direction_flag, @function .globl abi_test_get_and_clear_direction_flag .hidden abi_test_get_and_clear_direction_flag abi_test_get_and_clear_direction_flag: _CET_ENDBR pushfq popq %rax andq $0x400,%rax shrq $10,%rax cld .byte 0xf3,0xc3 .size abi_test_get_and_clear_direction_flag,.-abi_test_get_and_clear_direction_flag .type abi_test_set_direction_flag, @function .globl abi_test_set_direction_flag .hidden abi_test_set_direction_flag abi_test_set_direction_flag: _CET_ENDBR std .byte 0xf3,0xc3 .size 
abi_test_set_direction_flag,.-abi_test_set_direction_flag
#endif
marvin-hansen/iggy-streaming-system
70,861
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/sha256-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl sha256_block_data_order_nohw .hidden sha256_block_data_order_nohw .type sha256_block_data_order_nohw,@function .align 16 sha256_block_data_order_nohw: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 shlq $4,%rdx subq $64+32,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %rax,88(%rsp) .cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08 .Lprologue: movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d jmp .Lloop .align 16 .Lloop: movl %ebx,%edi leaq K256(%rip),%rbp xorl %ecx,%edi movl 0(%rsi),%r12d movl %r8d,%r13d movl %eax,%r14d bswapl %r12d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,0(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp addl %r14d,%r11d movl 4(%rsi),%r12d movl %edx,%r13d movl %r11d,%r14d bswapl %r12d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,4(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp addl %r14d,%r10d movl 8(%rsi),%r12d movl %ecx,%r13d movl %r10d,%r14d bswapl %r12d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,8(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp addl %r14d,%r9d movl 12(%rsi),%r12d movl %ebx,%r13d movl %r9d,%r14d bswapl %r12d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,12(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp addl %r14d,%r8d movl 16(%rsi),%r12d movl %eax,%r13d movl %r8d,%r14d bswapl %r12d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,16(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl 
$2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp addl %r14d,%edx movl 20(%rsi),%r12d movl %r11d,%r13d movl %edx,%r14d bswapl %r12d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,20(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp addl %r14d,%ecx movl 24(%rsi),%r12d movl %r10d,%r13d movl %ecx,%r14d bswapl %r12d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,24(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp addl %r14d,%ebx movl 28(%rsi),%r12d movl %r9d,%r13d movl %ebx,%r14d bswapl %r12d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,28(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp addl %r14d,%eax movl 32(%rsi),%r12d movl %r8d,%r13d movl %eax,%r14d bswapl %r12d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,32(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp addl %r14d,%r11d movl 36(%rsi),%r12d movl %edx,%r13d movl %r11d,%r14d bswapl %r12d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,36(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp addl %r14d,%r10d movl 40(%rsi),%r12d movl %ecx,%r13d movl %r10d,%r14d bswapl %r12d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,40(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp addl %r14d,%r9d movl 44(%rsi),%r12d movl %ebx,%r13d movl %r9d,%r14d bswapl %r12d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,44(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d 
movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp addl %r14d,%r8d movl 48(%rsi),%r12d movl %eax,%r13d movl %r8d,%r14d bswapl %r12d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,48(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp addl %r14d,%edx movl 52(%rsi),%r12d movl %r11d,%r13d movl %edx,%r14d bswapl %r12d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,52(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp addl %r14d,%ecx movl 56(%rsi),%r12d movl %r10d,%r13d movl %ecx,%r14d bswapl %r12d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,56(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp addl %r14d,%ebx movl 60(%rsi),%r12d movl %r9d,%r13d movl %ebx,%r14d bswapl %r12d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,60(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp jmp .Lrounds_16_xx .align 16 .Lrounds_16_xx: movl 4(%rsp),%r13d movl 56(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%eax movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 36(%rsp),%r12d addl 0(%rsp),%r12d movl %r8d,%r13d addl %r15d,%r12d movl %eax,%r14d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,0(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp movl 8(%rsp),%r13d movl 60(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r11d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 40(%rsp),%r12d addl 4(%rsp),%r12d movl %edx,%r13d addl %edi,%r12d movl %r11d,%r14d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,4(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi 
addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp movl 12(%rsp),%r13d movl 0(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r10d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 44(%rsp),%r12d addl 8(%rsp),%r12d movl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r14d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,8(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp movl 16(%rsp),%r13d movl 4(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r9d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 48(%rsp),%r12d addl 12(%rsp),%r12d movl %ebx,%r13d addl %edi,%r12d movl %r9d,%r14d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,12(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp movl 20(%rsp),%r13d movl 8(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r8d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 52(%rsp),%r12d addl 16(%rsp),%r12d movl %eax,%r13d addl %r15d,%r12d movl %r8d,%r14d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,16(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp movl 24(%rsp),%r13d movl 12(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%edx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 56(%rsp),%r12d addl 20(%rsp),%r12d movl %r11d,%r13d addl %edi,%r12d movl %edx,%r14d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,20(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp movl 28(%rsp),%r13d movl 16(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ecx movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 60(%rsp),%r12d addl 24(%rsp),%r12d movl %r10d,%r13d addl %r15d,%r12d 
movl %ecx,%r14d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,24(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp movl 32(%rsp),%r13d movl 20(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ebx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 0(%rsp),%r12d addl 28(%rsp),%r12d movl %r9d,%r13d addl %edi,%r12d movl %ebx,%r14d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,28(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl %ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp movl 36(%rsp),%r13d movl 24(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%eax movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 4(%rsp),%r12d addl 32(%rsp),%r12d movl %r8d,%r13d addl %r15d,%r12d movl %eax,%r14d rorl $14,%r13d movl %r9d,%r15d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r15d movl %r12d,32(%rsp) xorl %eax,%r14d andl %r8d,%r15d rorl $5,%r13d addl %r11d,%r12d xorl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r13d addl %r15d,%r12d movl %eax,%r15d addl (%rbp),%r12d xorl %eax,%r14d xorl %ebx,%r15d rorl $6,%r13d movl %ebx,%r11d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r11d addl %r12d,%edx addl %r12d,%r11d leaq 4(%rbp),%rbp movl 40(%rsp),%r13d movl 28(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r11d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 8(%rsp),%r12d addl 36(%rsp),%r12d movl %edx,%r13d addl %edi,%r12d movl %r11d,%r14d rorl $14,%r13d movl %r8d,%edi xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%edi movl %r12d,36(%rsp) xorl %r11d,%r14d andl %edx,%edi rorl $5,%r13d addl %r10d,%r12d xorl %r9d,%edi rorl $11,%r14d xorl %edx,%r13d addl %edi,%r12d movl %r11d,%edi addl (%rbp),%r12d xorl %r11d,%r14d xorl %eax,%edi rorl $6,%r13d movl %eax,%r10d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r10d addl %r12d,%ecx addl %r12d,%r10d leaq 4(%rbp),%rbp movl 44(%rsp),%r13d movl 32(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r10d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 12(%rsp),%r12d addl 40(%rsp),%r12d movl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r14d rorl $14,%r13d movl %edx,%r15d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r15d movl %r12d,40(%rsp) xorl %r10d,%r14d andl %ecx,%r15d rorl $5,%r13d addl %r9d,%r12d xorl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r13d addl %r15d,%r12d movl %r10d,%r15d addl (%rbp),%r12d xorl %r10d,%r14d xorl %r11d,%r15d rorl $6,%r13d movl %r11d,%r9d andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%r9d addl %r12d,%ebx addl %r12d,%r9d leaq 4(%rbp),%rbp movl 48(%rsp),%r13d movl 36(%rsp),%edi movl 
%r13d,%r12d rorl $11,%r13d addl %r14d,%r9d movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 16(%rsp),%r12d addl 44(%rsp),%r12d movl %ebx,%r13d addl %edi,%r12d movl %r9d,%r14d rorl $14,%r13d movl %ecx,%edi xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%edi movl %r12d,44(%rsp) xorl %r9d,%r14d andl %ebx,%edi rorl $5,%r13d addl %r8d,%r12d xorl %edx,%edi rorl $11,%r14d xorl %ebx,%r13d addl %edi,%r12d movl %r9d,%edi addl (%rbp),%r12d xorl %r9d,%r14d xorl %r10d,%edi rorl $6,%r13d movl %r10d,%r8d andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%r8d addl %r12d,%eax addl %r12d,%r8d leaq 20(%rbp),%rbp movl 52(%rsp),%r13d movl 40(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%r8d movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 20(%rsp),%r12d addl 48(%rsp),%r12d movl %eax,%r13d addl %r15d,%r12d movl %r8d,%r14d rorl $14,%r13d movl %ebx,%r15d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r15d movl %r12d,48(%rsp) xorl %r8d,%r14d andl %eax,%r15d rorl $5,%r13d addl %edx,%r12d xorl %ecx,%r15d rorl $11,%r14d xorl %eax,%r13d addl %r15d,%r12d movl %r8d,%r15d addl (%rbp),%r12d xorl %r8d,%r14d xorl %r9d,%r15d rorl $6,%r13d movl %r9d,%edx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%edx addl %r12d,%r11d addl %r12d,%edx leaq 4(%rbp),%rbp movl 56(%rsp),%r13d movl 44(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%edx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 24(%rsp),%r12d addl 52(%rsp),%r12d movl %r11d,%r13d addl %edi,%r12d movl %edx,%r14d rorl $14,%r13d movl %eax,%edi xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%edi movl %r12d,52(%rsp) xorl %edx,%r14d andl %r11d,%edi rorl $5,%r13d addl %ecx,%r12d xorl %ebx,%edi rorl $11,%r14d xorl %r11d,%r13d addl %edi,%r12d movl %edx,%edi addl (%rbp),%r12d xorl %edx,%r14d xorl %r8d,%edi rorl $6,%r13d movl %r8d,%ecx andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%ecx addl %r12d,%r10d addl %r12d,%ecx leaq 4(%rbp),%rbp movl 60(%rsp),%r13d movl 48(%rsp),%r15d movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ecx movl %r15d,%r14d rorl $2,%r15d xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%r15d shrl $10,%r14d rorl $17,%r15d xorl %r13d,%r12d xorl %r14d,%r15d addl 28(%rsp),%r12d addl 56(%rsp),%r12d movl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r14d rorl $14,%r13d movl %r11d,%r15d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r15d movl %r12d,56(%rsp) xorl %ecx,%r14d andl %r10d,%r15d rorl $5,%r13d addl %ebx,%r12d xorl %eax,%r15d rorl $11,%r14d xorl %r10d,%r13d addl %r15d,%r12d movl %ecx,%r15d addl (%rbp),%r12d xorl %ecx,%r14d xorl %edx,%r15d rorl $6,%r13d movl %edx,%ebx andl %r15d,%edi rorl $2,%r14d addl %r13d,%r12d xorl %edi,%ebx addl %r12d,%r9d addl %r12d,%ebx leaq 4(%rbp),%rbp movl 0(%rsp),%r13d movl 52(%rsp),%edi movl %r13d,%r12d rorl $11,%r13d addl %r14d,%ebx movl %edi,%r14d rorl $2,%edi xorl %r12d,%r13d shrl $3,%r12d rorl $7,%r13d xorl %r14d,%edi shrl $10,%r14d rorl $17,%edi xorl %r13d,%r12d xorl %r14d,%edi addl 32(%rsp),%r12d addl 60(%rsp),%r12d movl %r9d,%r13d addl %edi,%r12d movl %ebx,%r14d rorl $14,%r13d movl %r10d,%edi xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%edi movl %r12d,60(%rsp) xorl %ebx,%r14d andl %r9d,%edi rorl $5,%r13d addl %eax,%r12d xorl %r11d,%edi rorl $11,%r14d xorl %r9d,%r13d addl %edi,%r12d movl 
%ebx,%edi addl (%rbp),%r12d xorl %ebx,%r14d xorl %ecx,%edi rorl $6,%r13d movl %ecx,%eax andl %edi,%r15d rorl $2,%r14d addl %r13d,%r12d xorl %r15d,%eax addl %r12d,%r8d addl %r12d,%eax leaq 20(%rbp),%rbp cmpb $0,3(%rbp) jnz .Lrounds_16_xx movq 64+0(%rsp),%rdi addl %r14d,%eax leaq 64(%rsi),%rsi addl 0(%rdi),%eax addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb .Lloop movq 88(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue: .byte 0xf3,0xc3 .cfi_endproc .size sha256_block_data_order_nohw,.-sha256_block_data_order_nohw .section .rodata .align 64 .type K256,@object K256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908 .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908 .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl sha256_block_data_order_hw .hidden sha256_block_data_order_hw .type sha256_block_data_order_hw,@function .align 64 sha256_block_data_order_hw: .cfi_startproc #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+6(%rip) #endif 
_CET_ENDBR leaq K256+128(%rip),%rcx movdqu (%rdi),%xmm1 movdqu 16(%rdi),%xmm2 movdqa 512-128(%rcx),%xmm7 pshufd $0x1b,%xmm1,%xmm0 pshufd $0xb1,%xmm1,%xmm1 pshufd $0x1b,%xmm2,%xmm2 movdqa %xmm7,%xmm8 .byte 102,15,58,15,202,8 punpcklqdq %xmm0,%xmm2 jmp .Loop_shaext .align 16 .Loop_shaext: movdqu (%rsi),%xmm3 movdqu 16(%rsi),%xmm4 movdqu 32(%rsi),%xmm5 .byte 102,15,56,0,223 movdqu 48(%rsi),%xmm6 movdqa 0-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 102,15,56,0,231 movdqa %xmm2,%xmm10 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 nop movdqa %xmm1,%xmm9 .byte 15,56,203,202 movdqa 32-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 102,15,56,0,239 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 leaq 64(%rsi),%rsi .byte 15,56,204,220 .byte 15,56,203,202 movdqa 64-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 102,15,56,0,247 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 96-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 128-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 160-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 nop paddd %xmm7,%xmm6 .byte 15,56,204,220 .byte 15,56,203,202 movdqa 192-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,205,245 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 224-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 256-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 288-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 nop paddd %xmm7,%xmm6 .byte 15,56,204,220 .byte 15,56,203,202 movdqa 320-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,205,245 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm6,%xmm7 .byte 102,15,58,15,253,4 nop paddd %xmm7,%xmm3 .byte 15,56,204,229 .byte 15,56,203,202 movdqa 352-128(%rcx),%xmm0 paddd %xmm6,%xmm0 .byte 15,56,205,222 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm3,%xmm7 .byte 102,15,58,15,254,4 nop paddd %xmm7,%xmm4 .byte 15,56,204,238 .byte 15,56,203,202 movdqa 384-128(%rcx),%xmm0 paddd %xmm3,%xmm0 .byte 15,56,205,227 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm4,%xmm7 .byte 102,15,58,15,251,4 nop paddd %xmm7,%xmm5 .byte 15,56,204,243 .byte 15,56,203,202 movdqa 416-128(%rcx),%xmm0 paddd %xmm4,%xmm0 .byte 15,56,205,236 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 movdqa %xmm5,%xmm7 .byte 102,15,58,15,252,4 .byte 15,56,203,202 paddd %xmm7,%xmm6 movdqa 448-128(%rcx),%xmm0 paddd %xmm5,%xmm0 .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 .byte 15,56,205,245 movdqa %xmm8,%xmm7 .byte 15,56,203,202 movdqa 480-128(%rcx),%xmm0 paddd %xmm6,%xmm0 nop .byte 15,56,203,209 pshufd $0x0e,%xmm0,%xmm0 decq 
%rdx nop .byte 15,56,203,202 paddd %xmm10,%xmm2 paddd %xmm9,%xmm1 jnz .Loop_shaext pshufd $0xb1,%xmm2,%xmm2 pshufd $0x1b,%xmm1,%xmm7 pshufd $0xb1,%xmm1,%xmm1 punpckhqdq %xmm2,%xmm1 .byte 102,15,58,15,215,8 movdqu %xmm1,(%rdi) movdqu %xmm2,16(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size sha256_block_data_order_hw,.-sha256_block_data_order_hw .globl sha256_block_data_order_ssse3 .hidden sha256_block_data_order_ssse3 .type sha256_block_data_order_ssse3,@function .align 64 sha256_block_data_order_ssse3: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 shlq $4,%rdx subq $96,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %rax,88(%rsp) .cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08 .Lprologue_ssse3: movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d jmp .Lloop_ssse3 .align 16 .Lloop_ssse3: movdqa K256+512(%rip),%xmm7 movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 .byte 102,15,56,0,199 movdqu 48(%rsi),%xmm3 leaq K256(%rip),%rbp .byte 102,15,56,0,207 movdqa 0(%rbp),%xmm4 movdqa 32(%rbp),%xmm5 .byte 102,15,56,0,215 paddd %xmm0,%xmm4 movdqa 64(%rbp),%xmm6 .byte 102,15,56,0,223 movdqa 96(%rbp),%xmm7 paddd %xmm1,%xmm5 paddd %xmm2,%xmm6 paddd %xmm3,%xmm7 movdqa %xmm4,0(%rsp) movl %eax,%r14d movdqa %xmm5,16(%rsp) movl %ebx,%edi movdqa %xmm6,32(%rsp) xorl %ecx,%edi movdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp .Lssse3_00_47 .align 16 .Lssse3_00_47: subq $-128,%rbp rorl $14,%r13d movdqa %xmm1,%xmm4 movl %r14d,%eax movl %r9d,%r12d movdqa %xmm3,%xmm7 rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d .byte 102,15,58,15,224,4 andl %r8d,%r12d xorl %r8d,%r13d .byte 102,15,58,15,250,4 addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %ebx,%r15d addl %r12d,%r11d movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi paddd %xmm7,%xmm0 rorl $2,%r14d addl %r11d,%edx psrld $7,%xmm6 addl %edi,%r11d movl %edx,%r13d pshufd $250,%xmm3,%xmm7 addl %r11d,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%r11d movl %r8d,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %r11d,%r14d pxor %xmm5,%xmm4 andl %edx,%r12d xorl %edx,%r13d pslld $11,%xmm5 addl 4(%rsp),%r10d movl %r11d,%edi pxor %xmm6,%xmm4 xorl %r9d,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %eax,%edi addl %r12d,%r10d pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d psrld $10,%xmm7 addl %r13d,%r10d xorl %eax,%r15d paddd %xmm4,%xmm0 rorl $2,%r14d addl %r10d,%ecx psrlq $17,%xmm6 addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %ecx,%r13d xorl %r8d,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d pshufd $128,%xmm7,%xmm7 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d psrldq $8,%xmm7 xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d paddd %xmm7,%xmm0 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d pshufd $80,%xmm0,%xmm7 xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx movdqa %xmm7,%xmm6 addl %edi,%r9d movl %ebx,%r13d psrld $10,%xmm7 addl %r9d,%r14d rorl 
$14,%r13d psrlq $17,%xmm6 movl %r14d,%r9d movl %ecx,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d psrlq $2,%xmm6 andl %ebx,%r12d xorl %ebx,%r13d addl 12(%rsp),%r8d pxor %xmm6,%xmm7 movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %r10d,%edi addl %r12d,%r8d movdqa 0(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d paddd %xmm7,%xmm0 rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d paddd %xmm0,%xmm6 movl %eax,%r13d addl %r8d,%r14d movdqa %xmm6,0(%rsp) rorl $14,%r13d movdqa %xmm2,%xmm4 movl %r14d,%r8d movl %ebx,%r12d movdqa %xmm0,%xmm7 rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d .byte 102,15,58,15,225,4 andl %eax,%r12d xorl %eax,%r13d .byte 102,15,58,15,251,4 addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %r9d,%r15d addl %r12d,%edx movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi paddd %xmm7,%xmm1 rorl $2,%r14d addl %edx,%r11d psrld $7,%xmm6 addl %edi,%edx movl %r11d,%r13d pshufd $250,%xmm0,%xmm7 addl %edx,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%edx movl %eax,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %edx,%r14d pxor %xmm5,%xmm4 andl %r11d,%r12d xorl %r11d,%r13d pslld $11,%xmm5 addl 20(%rsp),%ecx movl %edx,%edi pxor %xmm6,%xmm4 xorl %ebx,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %r8d,%edi addl %r12d,%ecx pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d psrld $10,%xmm7 addl %r13d,%ecx xorl %r8d,%r15d paddd %xmm4,%xmm1 rorl $2,%r14d addl %ecx,%r10d psrlq $17,%xmm6 addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %r10d,%r13d xorl %eax,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d pshufd $128,%xmm7,%xmm7 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d psrldq $8,%xmm7 xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d paddd %xmm7,%xmm1 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx pshufd $80,%xmm1,%xmm7 xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d movdqa %xmm7,%xmm6 addl %edi,%ebx movl %r9d,%r13d psrld $10,%xmm7 addl %ebx,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%ebx movl %r10d,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d psrlq $2,%xmm6 andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax pxor %xmm6,%xmm7 movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %ecx,%edi addl %r12d,%eax movdqa 32(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d paddd %xmm7,%xmm1 rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax paddd %xmm1,%xmm6 movl %r8d,%r13d addl %eax,%r14d movdqa %xmm6,16(%rsp) rorl $14,%r13d movdqa %xmm3,%xmm4 movl %r14d,%eax movl %r9d,%r12d movdqa %xmm1,%xmm7 rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d .byte 102,15,58,15,226,4 andl %r8d,%r12d xorl %r8d,%r13d .byte 102,15,58,15,248,4 addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %ebx,%r15d addl %r12d,%r11d movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi paddd %xmm7,%xmm2 rorl $2,%r14d addl %r11d,%edx psrld $7,%xmm6 addl %edi,%r11d movl %edx,%r13d pshufd $250,%xmm1,%xmm7 addl 
%r11d,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%r11d movl %r8d,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %r11d,%r14d pxor %xmm5,%xmm4 andl %edx,%r12d xorl %edx,%r13d pslld $11,%xmm5 addl 36(%rsp),%r10d movl %r11d,%edi pxor %xmm6,%xmm4 xorl %r9d,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %eax,%edi addl %r12d,%r10d pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d psrld $10,%xmm7 addl %r13d,%r10d xorl %eax,%r15d paddd %xmm4,%xmm2 rorl $2,%r14d addl %r10d,%ecx psrlq $17,%xmm6 addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %ecx,%r13d xorl %r8d,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d pshufd $128,%xmm7,%xmm7 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d psrldq $8,%xmm7 xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d paddd %xmm7,%xmm2 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d pshufd $80,%xmm2,%xmm7 xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx movdqa %xmm7,%xmm6 addl %edi,%r9d movl %ebx,%r13d psrld $10,%xmm7 addl %r9d,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%r9d movl %ecx,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d psrlq $2,%xmm6 andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d pxor %xmm6,%xmm7 movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %r10d,%edi addl %r12d,%r8d movdqa 64(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d paddd %xmm7,%xmm2 rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d paddd %xmm2,%xmm6 movl %eax,%r13d addl %r8d,%r14d movdqa %xmm6,32(%rsp) rorl $14,%r13d movdqa %xmm0,%xmm4 movl %r14d,%r8d movl %ebx,%r12d movdqa %xmm2,%xmm7 rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d .byte 102,15,58,15,227,4 andl %eax,%r12d xorl %eax,%r13d .byte 102,15,58,15,249,4 addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d movdqa %xmm4,%xmm5 xorl %r9d,%r15d addl %r12d,%edx movdqa %xmm4,%xmm6 rorl $6,%r13d andl %r15d,%edi psrld $3,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi paddd %xmm7,%xmm3 rorl $2,%r14d addl %edx,%r11d psrld $7,%xmm6 addl %edi,%edx movl %r11d,%r13d pshufd $250,%xmm2,%xmm7 addl %edx,%r14d rorl $14,%r13d pslld $14,%xmm5 movl %r14d,%edx movl %eax,%r12d pxor %xmm6,%xmm4 rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d psrld $11,%xmm6 xorl %edx,%r14d pxor %xmm5,%xmm4 andl %r11d,%r12d xorl %r11d,%r13d pslld $11,%xmm5 addl 52(%rsp),%ecx movl %edx,%edi pxor %xmm6,%xmm4 xorl %ebx,%r12d rorl $11,%r14d movdqa %xmm7,%xmm6 xorl %r8d,%edi addl %r12d,%ecx pxor %xmm5,%xmm4 rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d psrld $10,%xmm7 addl %r13d,%ecx xorl %r8d,%r15d paddd %xmm4,%xmm3 rorl $2,%r14d addl %ecx,%r10d psrlq $17,%xmm6 addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d pxor %xmm6,%xmm7 rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d psrlq $2,%xmm6 xorl %r10d,%r13d xorl %eax,%r12d pxor %xmm6,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d pshufd $128,%xmm7,%xmm7 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d psrldq $8,%xmm7 xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d paddd %xmm7,%xmm3 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx pshufd $80,%xmm3,%xmm7 xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d movdqa %xmm7,%xmm6 addl %edi,%ebx movl %r9d,%r13d psrld $10,%xmm7 addl 
%ebx,%r14d rorl $14,%r13d psrlq $17,%xmm6 movl %r14d,%ebx movl %r10d,%r12d pxor %xmm6,%xmm7 rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d psrlq $2,%xmm6 andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax pxor %xmm6,%xmm7 movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d pshufd $8,%xmm7,%xmm7 xorl %ecx,%edi addl %r12d,%eax movdqa 96(%rbp),%xmm6 rorl $6,%r13d andl %edi,%r15d pslldq $8,%xmm7 xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d paddd %xmm7,%xmm3 rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax paddd %xmm3,%xmm6 movl %r8d,%r13d addl %eax,%r14d movdqa %xmm6,48(%rsp) cmpb $0,131(%rbp) jne .Lssse3_00_47 rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d xorl %ebx,%r15d addl %r12d,%r11d rorl $6,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi rorl $2,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d rorl $11,%r14d xorl %eax,%edi addl %r12d,%r10d rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d rorl $2,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d xorl %ecx,%r13d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d xorl %r10d,%edi addl %r12d,%r8d rorl $6,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d xorl %r9d,%r15d addl %r12d,%edx rorl $6,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi rorl $2,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d rorl $11,%r14d xorl %r8d,%edi addl %r12d,%ecx rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d rorl $2,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d xorl %r10d,%r13d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl 
%r10d,%r12d rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d xorl %ecx,%edi addl %r12d,%eax rorl $6,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d rorl $9,%r14d xorl %r8d,%r13d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d rorl $11,%r14d xorl %ebx,%r15d addl %r12d,%r11d rorl $6,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi rorl $2,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d rorl $9,%r14d xorl %edx,%r13d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d rorl $11,%r14d xorl %eax,%edi addl %r12d,%r10d rorl $6,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d rorl $2,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d rorl $9,%r14d xorl %ecx,%r13d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d rorl $11,%r14d xorl %r11d,%r15d addl %r12d,%r9d rorl $6,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi rorl $2,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d rorl $9,%r14d xorl %ebx,%r13d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d rorl $11,%r14d xorl %r10d,%edi addl %r12d,%r8d rorl $6,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d rorl $2,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d rorl $9,%r14d xorl %eax,%r13d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d rorl $11,%r14d xorl %r9d,%r15d addl %r12d,%edx rorl $6,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi rorl $2,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d rorl $9,%r14d xorl %r11d,%r13d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d rorl $11,%r14d xorl %r8d,%edi addl %r12d,%ecx rorl $6,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d rorl $2,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d rorl $9,%r14d xorl %r10d,%r13d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d rorl $11,%r14d xorl %edx,%r15d addl %r12d,%ebx rorl $6,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi rorl $2,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d rorl $9,%r14d xorl %r9d,%r13d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d rorl $11,%r14d xorl %ecx,%edi addl %r12d,%eax rorl $6,%r13d andl %edi,%r15d xorl %ebx,%r14d 
addl %r13d,%eax xorl %ecx,%r15d rorl $2,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%rdi movl %r14d,%eax addl 0(%rdi),%eax leaq 64(%rsi),%rsi addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb .Lloop_ssse3 movq 88(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue_ssse3: .byte 0xf3,0xc3 .cfi_endproc .size sha256_block_data_order_ssse3,.-sha256_block_data_order_ssse3 .globl sha256_block_data_order_avx .hidden sha256_block_data_order_avx .type sha256_block_data_order_avx,@function .align 64 sha256_block_data_order_avx: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 shlq $4,%rdx subq $96,%rsp leaq (%rsi,%rdx,4),%rdx andq $-64,%rsp movq %rdi,64+0(%rsp) movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %rax,88(%rsp) .cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08 .Lprologue_avx: vzeroupper movl 0(%rdi),%eax movl 4(%rdi),%ebx movl 8(%rdi),%ecx movl 12(%rdi),%edx movl 16(%rdi),%r8d movl 20(%rdi),%r9d movl 24(%rdi),%r10d movl 28(%rdi),%r11d vmovdqa K256+512+32(%rip),%xmm8 vmovdqa K256+512+64(%rip),%xmm9 jmp .Lloop_avx .align 16 .Lloop_avx: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi),%xmm0 vmovdqu 16(%rsi),%xmm1 vmovdqu 32(%rsi),%xmm2 vmovdqu 48(%rsi),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%edi vmovdqa %xmm6,32(%rsp) xorl %ecx,%edi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp .Lavx_00_47 .align 16 .Lavx_00_47: subq $-128,%rbp vpalignr $4,%xmm0,%xmm1,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm2,%xmm3,%xmm7 shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm0,%xmm0 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi vpshufd $250,%xmm3,%xmm7 shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d vpaddd 
%xmm4,%xmm0,%xmm0 addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpaddd %xmm6,%xmm0,%xmm0 andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d vpshufd $80,%xmm0,%xmm7 movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d vpxor %xmm7,%xmm6,%xmm6 xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx vpsrlq $2,%xmm7,%xmm7 addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d vpaddd %xmm6,%xmm0,%xmm0 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpaddd 0(%rbp),%xmm0,%xmm6 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) vpalignr $4,%xmm1,%xmm2,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm3,%xmm0,%xmm7 shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm1,%xmm1 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi vpshufd $250,%xmm0,%xmm7 shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm1,%xmm1 addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpaddd %xmm6,%xmm1,%xmm1 andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx vpshufd $80,%xmm1,%xmm7 movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx vpxor %xmm7,%xmm6,%xmm6 xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d vpsrlq $2,%xmm7,%xmm7 addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d vpaddd 
%xmm6,%xmm1,%xmm1 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpaddd 32(%rbp),%xmm1,%xmm6 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm0,%xmm1,%xmm7 shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm2,%xmm2 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi vpshufd $250,%xmm1,%xmm7 shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 36(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm2,%xmm2 addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpaddd %xmm6,%xmm2,%xmm2 andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d vpshufd $80,%xmm2,%xmm7 movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d vpxor %xmm7,%xmm6,%xmm6 xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx vpsrlq $2,%xmm7,%xmm7 addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d vpaddd %xmm6,%xmm2,%xmm2 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpaddd 64(%rbp),%xmm2,%xmm6 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm1,%xmm2,%xmm7 shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm3,%xmm3 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi vpshufd $250,%xmm2,%xmm7 shrdl 
$2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d vpslld $11,%xmm5,%xmm5 xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d vpxor %xmm5,%xmm4,%xmm4 shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx vpsrlq $17,%xmm7,%xmm7 shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm3,%xmm3 addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d vpshufb %xmm8,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpaddd %xmm6,%xmm3,%xmm3 andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx vpshufd $80,%xmm3,%xmm7 movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d vpsrld $10,%xmm7,%xmm6 xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx vpxor %xmm7,%xmm6,%xmm6 xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d vpsrlq $2,%xmm7,%xmm7 addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpxor %xmm7,%xmm6,%xmm6 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpshufb %xmm9,%xmm6,%xmm6 shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d vpaddd %xmm6,%xmm3,%xmm3 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpaddd 96(%rbp),%xmm3,%xmm6 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) cmpb $0,131(%rbp) jne .Lavx_00_47 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl 
%ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d shrdl $9,%r14d,%r14d xorl %r8d,%r13d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d xorl %r10d,%r12d shrdl $11,%r14d,%r14d xorl %ebx,%r15d addl %r12d,%r11d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%edi shrdl $2,%r14d,%r14d addl %r11d,%edx addl %edi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d shrdl $9,%r14d,%r14d xorl %edx,%r13d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%edi xorl %r9d,%r12d shrdl $11,%r14d,%r14d xorl %eax,%edi addl %r12d,%r10d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d shrdl $2,%r14d,%r14d addl %r10d,%ecx addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d shrdl $9,%r14d,%r14d xorl %ecx,%r13d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d xorl %r8d,%r12d shrdl $11,%r14d,%r14d xorl %r11d,%r15d addl %r12d,%r9d shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%edi shrdl $2,%r14d,%r14d addl %r9d,%ebx addl %edi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d shrdl $9,%r14d,%r14d xorl %ebx,%r13d xorl 
%edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%edi xorl %edx,%r12d shrdl $11,%r14d,%r14d xorl %r10d,%edi addl %r12d,%r8d shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d shrdl $2,%r14d,%r14d addl %r8d,%eax addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d shrdl $9,%r14d,%r14d xorl %eax,%r13d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d xorl %ecx,%r12d shrdl $11,%r14d,%r14d xorl %r9d,%r15d addl %r12d,%edx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%edi shrdl $2,%r14d,%r14d addl %edx,%r11d addl %edi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d shrdl $9,%r14d,%r14d xorl %r11d,%r13d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%edi xorl %ebx,%r12d shrdl $11,%r14d,%r14d xorl %r8d,%edi addl %r12d,%ecx shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d shrdl $2,%r14d,%r14d addl %ecx,%r10d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d shrdl $9,%r14d,%r14d xorl %r10d,%r13d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d xorl %eax,%r12d shrdl $11,%r14d,%r14d xorl %edx,%r15d addl %r12d,%ebx shrdl $6,%r13d,%r13d andl %r15d,%edi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%edi shrdl $2,%r14d,%r14d addl %ebx,%r9d addl %edi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d shrdl $9,%r14d,%r14d xorl %r9d,%r13d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%edi xorl %r11d,%r12d shrdl $11,%r14d,%r14d xorl %ecx,%edi addl %r12d,%eax shrdl $6,%r13d,%r13d andl %edi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d shrdl $2,%r14d,%r14d addl %eax,%r8d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%rdi movl %r14d,%eax addl 0(%rdi),%eax leaq 64(%rsi),%rsi addl 4(%rdi),%ebx addl 8(%rdi),%ecx addl 12(%rdi),%edx addl 16(%rdi),%r8d addl 20(%rdi),%r9d addl 24(%rdi),%r10d addl 28(%rdi),%r11d cmpq 64+16(%rsp),%rsi movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) jb .Lloop_avx movq 88(%rsp),%rsi .cfi_def_cfa %rsi,8 vzeroupper movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue_avx: .byte 0xf3,0xc3 .cfi_endproc .size sha256_block_data_order_avx,.-sha256_block_data_order_avx #endif
marvin-hansen/iggy-streaming-system
23,206
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/ghash-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl gcm_init_clmul .hidden gcm_init_clmul .type gcm_init_clmul,@function .align 16 gcm_init_clmul: .cfi_startproc _CET_ENDBR .L_init_clmul: movdqu (%rsi),%xmm2 pshufd $78,%xmm2,%xmm2 pshufd $255,%xmm2,%xmm4 movdqa %xmm2,%xmm3 psllq $1,%xmm2 pxor %xmm5,%xmm5 psrlq $63,%xmm3 pcmpgtd %xmm4,%xmm5 pslldq $8,%xmm3 por %xmm3,%xmm2 pand .L0x1c2_polynomial(%rip),%xmm5 pxor %xmm5,%xmm2 pshufd $78,%xmm2,%xmm6 movdqa %xmm2,%xmm0 pxor %xmm2,%xmm6 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 pshufd $78,%xmm2,%xmm3 pshufd $78,%xmm0,%xmm4 pxor %xmm2,%xmm3 movdqu %xmm2,0(%rdi) pxor %xmm0,%xmm4 movdqu %xmm0,16(%rdi) .byte 102,15,58,15,227,8 movdqu %xmm4,32(%rdi) movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 movdqa %xmm0,%xmm5 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,222,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 pshufd $78,%xmm5,%xmm3 pshufd $78,%xmm0,%xmm4 pxor %xmm5,%xmm3 movdqu %xmm5,48(%rdi) pxor %xmm0,%xmm4 movdqu %xmm0,64(%rdi) .byte 102,15,58,15,227,8 movdqu %xmm4,80(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size gcm_init_clmul,.-gcm_init_clmul .globl gcm_gmult_clmul .hidden gcm_gmult_clmul .type gcm_gmult_clmul,@function .align 16 gcm_gmult_clmul: .cfi_startproc _CET_ENDBR .L_gmult_clmul: movdqu (%rdi),%xmm0 movdqa .Lbswap_mask(%rip),%xmm5 movdqu (%rsi),%xmm2 movdqu 32(%rsi),%xmm4 .byte 102,15,56,0,197 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,220,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor 
%xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 .byte 102,15,56,0,197 movdqu %xmm0,(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size gcm_gmult_clmul,.-gcm_gmult_clmul .globl gcm_ghash_clmul .hidden gcm_ghash_clmul .type gcm_ghash_clmul,@function .align 32 gcm_ghash_clmul: .cfi_startproc _CET_ENDBR .L_ghash_clmul: movdqa .Lbswap_mask(%rip),%xmm10 movdqu (%rdi),%xmm0 movdqu (%rsi),%xmm2 movdqu 32(%rsi),%xmm7 .byte 102,65,15,56,0,194 subq $0x10,%rcx jz .Lodd_tail movdqu 16(%rsi),%xmm6 cmpq $0x30,%rcx jb .Lskip4x subq $0x30,%rcx movq $0xA040608020C0E000,%rax movdqu 48(%rsi),%xmm14 movdqu 64(%rsi),%xmm15 movdqu 48(%rdx),%xmm3 movdqu 32(%rdx),%xmm11 .byte 102,65,15,56,0,218 .byte 102,69,15,56,0,218 movdqa %xmm3,%xmm5 pshufd $78,%xmm3,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,68,218,0 .byte 102,15,58,68,234,17 .byte 102,15,58,68,231,0 movdqa %xmm11,%xmm13 pshufd $78,%xmm11,%xmm12 pxor %xmm11,%xmm12 .byte 102,68,15,58,68,222,0 .byte 102,68,15,58,68,238,17 .byte 102,68,15,58,68,231,16 xorps %xmm11,%xmm3 xorps %xmm13,%xmm5 movups 80(%rsi),%xmm7 xorps %xmm12,%xmm4 movdqu 16(%rdx),%xmm11 movdqu 0(%rdx),%xmm8 .byte 102,69,15,56,0,218 .byte 102,69,15,56,0,194 movdqa %xmm11,%xmm13 pshufd $78,%xmm11,%xmm12 pxor %xmm8,%xmm0 pxor %xmm11,%xmm12 .byte 102,69,15,58,68,222,0 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm8 pxor %xmm0,%xmm8 .byte 102,69,15,58,68,238,17 .byte 102,68,15,58,68,231,0 xorps %xmm11,%xmm3 xorps %xmm13,%xmm5 leaq 64(%rdx),%rdx subq $0x40,%rcx jc .Ltail4x jmp .Lmod4_loop .align 32 .Lmod4_loop: .byte 102,65,15,58,68,199,0 xorps %xmm12,%xmm4 movdqu 48(%rdx),%xmm11 .byte 102,69,15,56,0,218 .byte 102,65,15,58,68,207,17 xorps %xmm3,%xmm0 movdqu 32(%rdx),%xmm3 movdqa %xmm11,%xmm13 .byte 102,68,15,58,68,199,16 pshufd $78,%xmm11,%xmm12 xorps %xmm5,%xmm1 pxor %xmm11,%xmm12 .byte 102,65,15,56,0,218 movups 32(%rsi),%xmm7 xorps %xmm4,%xmm8 .byte 102,68,15,58,68,218,0 pshufd $78,%xmm3,%xmm4 pxor %xmm0,%xmm8 movdqa %xmm3,%xmm5 pxor %xmm1,%xmm8 pxor %xmm3,%xmm4 movdqa %xmm8,%xmm9 .byte 102,68,15,58,68,234,17 pslldq $8,%xmm8 psrldq $8,%xmm9 pxor %xmm8,%xmm0 movdqa .L7_mask(%rip),%xmm8 pxor %xmm9,%xmm1 .byte 102,76,15,110,200 pand %xmm0,%xmm8 .byte 102,69,15,56,0,200 pxor %xmm0,%xmm9 .byte 102,68,15,58,68,231,0 psllq $57,%xmm9 movdqa %xmm9,%xmm8 pslldq $8,%xmm9 .byte 102,15,58,68,222,0 psrldq $8,%xmm8 pxor %xmm9,%xmm0 pxor %xmm8,%xmm1 movdqu 0(%rdx),%xmm8 movdqa %xmm0,%xmm9 psrlq $1,%xmm0 .byte 102,15,58,68,238,17 xorps %xmm11,%xmm3 movdqu 16(%rdx),%xmm11 .byte 102,69,15,56,0,218 .byte 102,15,58,68,231,16 xorps %xmm13,%xmm5 movups 80(%rsi),%xmm7 .byte 102,69,15,56,0,194 pxor %xmm9,%xmm1 pxor %xmm0,%xmm9 psrlq $5,%xmm0 movdqa %xmm11,%xmm13 pxor %xmm12,%xmm4 pshufd $78,%xmm11,%xmm12 pxor %xmm9,%xmm0 pxor %xmm8,%xmm1 pxor %xmm11,%xmm12 .byte 102,69,15,58,68,222,0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 movdqa %xmm0,%xmm1 .byte 102,69,15,58,68,238,17 xorps %xmm11,%xmm3 pshufd $78,%xmm0,%xmm8 pxor %xmm0,%xmm8 .byte 102,68,15,58,68,231,0 xorps %xmm13,%xmm5 leaq 64(%rdx),%rdx subq $0x40,%rcx jnc .Lmod4_loop .Ltail4x: .byte 102,65,15,58,68,199,0 .byte 102,65,15,58,68,207,17 .byte 102,68,15,58,68,199,16 xorps %xmm12,%xmm4 xorps %xmm3,%xmm0 xorps %xmm5,%xmm1 pxor %xmm0,%xmm1 pxor %xmm4,%xmm8 pxor %xmm1,%xmm8 pxor %xmm0,%xmm1 movdqa %xmm8,%xmm9 psrldq $8,%xmm8 pslldq $8,%xmm9 pxor %xmm8,%xmm1 pxor %xmm9,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 
movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 addq $0x40,%rcx jz .Ldone movdqu 32(%rsi),%xmm7 subq $0x10,%rcx jz .Lodd_tail .Lskip4x: movdqu (%rdx),%xmm8 movdqu 16(%rdx),%xmm3 .byte 102,69,15,56,0,194 .byte 102,65,15,56,0,218 pxor %xmm8,%xmm0 movdqa %xmm3,%xmm5 pshufd $78,%xmm3,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,68,218,0 .byte 102,15,58,68,234,17 .byte 102,15,58,68,231,0 leaq 32(%rdx),%rdx nop subq $0x20,%rcx jbe .Leven_tail nop jmp .Lmod_loop .align 32 .Lmod_loop: movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm8 pshufd $78,%xmm0,%xmm4 pxor %xmm0,%xmm4 .byte 102,15,58,68,198,0 .byte 102,15,58,68,206,17 .byte 102,15,58,68,231,16 pxor %xmm3,%xmm0 pxor %xmm5,%xmm1 movdqu (%rdx),%xmm9 pxor %xmm0,%xmm8 .byte 102,69,15,56,0,202 movdqu 16(%rdx),%xmm3 pxor %xmm1,%xmm8 pxor %xmm9,%xmm1 pxor %xmm8,%xmm4 .byte 102,65,15,56,0,218 movdqa %xmm4,%xmm8 psrldq $8,%xmm8 pslldq $8,%xmm4 pxor %xmm8,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm3,%xmm5 movdqa %xmm0,%xmm9 movdqa %xmm0,%xmm8 psllq $5,%xmm0 pxor %xmm0,%xmm8 .byte 102,15,58,68,218,0 psllq $1,%xmm0 pxor %xmm8,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm8 pslldq $8,%xmm0 psrldq $8,%xmm8 pxor %xmm9,%xmm0 pshufd $78,%xmm5,%xmm4 pxor %xmm8,%xmm1 pxor %xmm5,%xmm4 movdqa %xmm0,%xmm9 psrlq $1,%xmm0 .byte 102,15,58,68,234,17 pxor %xmm9,%xmm1 pxor %xmm0,%xmm9 psrlq $5,%xmm0 pxor %xmm9,%xmm0 leaq 32(%rdx),%rdx psrlq $1,%xmm0 .byte 102,15,58,68,231,0 pxor %xmm1,%xmm0 subq $0x20,%rcx ja .Lmod_loop .Leven_tail: movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm8 pshufd $78,%xmm0,%xmm4 pxor %xmm0,%xmm4 .byte 102,15,58,68,198,0 .byte 102,15,58,68,206,17 .byte 102,15,58,68,231,16 pxor %xmm3,%xmm0 pxor %xmm5,%xmm1 pxor %xmm0,%xmm8 pxor %xmm1,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm8 psrldq $8,%xmm8 pslldq $8,%xmm4 pxor %xmm8,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 testq %rcx,%rcx jnz .Ldone .Lodd_tail: movdqu (%rdx),%xmm8 .byte 102,69,15,56,0,194 pxor %xmm8,%xmm0 movdqa %xmm0,%xmm1 pshufd $78,%xmm0,%xmm3 pxor %xmm0,%xmm3 .byte 102,15,58,68,194,0 .byte 102,15,58,68,202,17 .byte 102,15,58,68,223,0 pxor %xmm0,%xmm3 pxor %xmm1,%xmm3 movdqa %xmm3,%xmm4 psrldq $8,%xmm3 pslldq $8,%xmm4 pxor %xmm3,%xmm1 pxor %xmm4,%xmm0 movdqa %xmm0,%xmm4 movdqa %xmm0,%xmm3 psllq $5,%xmm0 pxor %xmm0,%xmm3 psllq $1,%xmm0 pxor %xmm3,%xmm0 psllq $57,%xmm0 movdqa %xmm0,%xmm3 pslldq $8,%xmm0 psrldq $8,%xmm3 pxor %xmm4,%xmm0 pxor %xmm3,%xmm1 movdqa %xmm0,%xmm4 psrlq $1,%xmm0 pxor %xmm4,%xmm1 pxor %xmm0,%xmm4 psrlq $5,%xmm0 pxor %xmm4,%xmm0 psrlq $1,%xmm0 pxor %xmm1,%xmm0 .Ldone: .byte 102,65,15,56,0,194 movdqu %xmm0,(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size gcm_ghash_clmul,.-gcm_ghash_clmul .globl gcm_init_avx .hidden gcm_init_avx .type gcm_init_avx,@function .align 32 gcm_init_avx: .cfi_startproc _CET_ENDBR vzeroupper vmovdqu (%rsi),%xmm2 vpshufd $78,%xmm2,%xmm2 vpshufd $255,%xmm2,%xmm4 vpsrlq $63,%xmm2,%xmm3 vpsllq $1,%xmm2,%xmm2 vpxor %xmm5,%xmm5,%xmm5 vpcmpgtd %xmm4,%xmm5,%xmm5 vpslldq $8,%xmm3,%xmm3 vpor %xmm3,%xmm2,%xmm2 vpand .L0x1c2_polynomial(%rip),%xmm5,%xmm5 vpxor %xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm2,%xmm2,%xmm6 vmovdqa %xmm2,%xmm0 vpxor 
%xmm2,%xmm6,%xmm6 movq $4,%r10 jmp .Linit_start_avx .align 32 .Linit_loop_avx: vpalignr $8,%xmm3,%xmm4,%xmm5 vmovdqu %xmm5,-16(%rdi) vpunpckhqdq %xmm0,%xmm0,%xmm3 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3 vpxor %xmm0,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm0,%xmm0 vpxor %xmm3,%xmm1,%xmm1 vpsllq $57,%xmm0,%xmm3 vpsllq $62,%xmm0,%xmm4 vpxor %xmm3,%xmm4,%xmm4 vpsllq $63,%xmm0,%xmm3 vpxor %xmm3,%xmm4,%xmm4 vpslldq $8,%xmm4,%xmm3 vpsrldq $8,%xmm4,%xmm4 vpxor %xmm3,%xmm0,%xmm0 vpxor %xmm4,%xmm1,%xmm1 vpsrlq $1,%xmm0,%xmm4 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $5,%xmm4,%xmm4 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $1,%xmm0,%xmm0 vpxor %xmm1,%xmm0,%xmm0 .Linit_start_avx: vmovdqa %xmm0,%xmm5 vpunpckhqdq %xmm0,%xmm0,%xmm3 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3 vpxor %xmm0,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm0,%xmm0 vpxor %xmm3,%xmm1,%xmm1 vpsllq $57,%xmm0,%xmm3 vpsllq $62,%xmm0,%xmm4 vpxor %xmm3,%xmm4,%xmm4 vpsllq $63,%xmm0,%xmm3 vpxor %xmm3,%xmm4,%xmm4 vpslldq $8,%xmm4,%xmm3 vpsrldq $8,%xmm4,%xmm4 vpxor %xmm3,%xmm0,%xmm0 vpxor %xmm4,%xmm1,%xmm1 vpsrlq $1,%xmm0,%xmm4 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $5,%xmm4,%xmm4 vpxor %xmm4,%xmm0,%xmm0 vpsrlq $1,%xmm0,%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpshufd $78,%xmm5,%xmm3 vpshufd $78,%xmm0,%xmm4 vpxor %xmm5,%xmm3,%xmm3 vmovdqu %xmm5,0(%rdi) vpxor %xmm0,%xmm4,%xmm4 vmovdqu %xmm0,16(%rdi) leaq 48(%rdi),%rdi subq $1,%r10 jnz .Linit_loop_avx vpalignr $8,%xmm4,%xmm3,%xmm5 vmovdqu %xmm5,-16(%rdi) vzeroupper .byte 0xf3,0xc3 .cfi_endproc .size gcm_init_avx,.-gcm_init_avx .globl gcm_gmult_avx .hidden gcm_gmult_avx .type gcm_gmult_avx,@function .align 32 gcm_gmult_avx: .cfi_startproc _CET_ENDBR jmp .L_gmult_clmul .cfi_endproc .size gcm_gmult_avx,.-gcm_gmult_avx .globl gcm_ghash_avx .hidden gcm_ghash_avx .type gcm_ghash_avx,@function .align 32 gcm_ghash_avx: .cfi_startproc _CET_ENDBR vzeroupper vmovdqu (%rdi),%xmm10 leaq .L0x1c2_polynomial(%rip),%r10 leaq 64(%rsi),%rsi vmovdqu .Lbswap_mask(%rip),%xmm13 vpshufb %xmm13,%xmm10,%xmm10 cmpq $0x80,%rcx jb .Lshort_avx subq $0x80,%rcx vmovdqu 112(%rdx),%xmm14 vmovdqu 0-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm14 vmovdqu 32-64(%rsi),%xmm7 vpunpckhqdq %xmm14,%xmm14,%xmm9 vmovdqu 96(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm14,%xmm9,%xmm9 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 16-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vmovdqu 80(%rdx),%xmm14 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 48-64(%rsi),%xmm6 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 64(%rdx),%xmm15 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 80-64(%rsi),%xmm7 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vmovdqu 48(%rdx),%xmm14 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm4,%xmm1,%xmm1 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 96-64(%rsi),%xmm6 vpxor 
%xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 128-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 32(%rdx),%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vmovdqu 16(%rdx),%xmm14 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm4,%xmm1,%xmm1 vpshufb %xmm13,%xmm14,%xmm14 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 144-64(%rsi),%xmm6 vpxor %xmm5,%xmm2,%xmm2 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 176-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu (%rdx),%xmm15 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm1,%xmm4,%xmm4 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 160-64(%rsi),%xmm6 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2 leaq 128(%rdx),%rdx cmpq $0x80,%rcx jb .Ltail_avx vpxor %xmm10,%xmm15,%xmm15 subq $0x80,%rcx jmp .Loop8x_avx .align 32 .Loop8x_avx: vpunpckhqdq %xmm15,%xmm15,%xmm8 vmovdqu 112(%rdx),%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm15,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm10 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm11 vmovdqu 0-64(%rsi),%xmm6 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm12 vmovdqu 32-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vmovdqu 96(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpxor %xmm3,%xmm10,%xmm10 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vxorps %xmm4,%xmm11,%xmm11 vmovdqu 16-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm5,%xmm12,%xmm12 vxorps %xmm15,%xmm8,%xmm8 vmovdqu 80(%rdx),%xmm14 vpxor %xmm10,%xmm12,%xmm12 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpxor %xmm11,%xmm12,%xmm12 vpslldq $8,%xmm12,%xmm9 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vpsrldq $8,%xmm12,%xmm12 vpxor %xmm9,%xmm10,%xmm10 vmovdqu 48-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm14 vxorps %xmm12,%xmm11,%xmm11 vpxor %xmm1,%xmm4,%xmm4 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 80-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 64(%rdx),%xmm15 vpalignr $8,%xmm10,%xmm10,%xmm12 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vxorps %xmm15,%xmm8,%xmm8 vpxor %xmm5,%xmm2,%xmm2 vmovdqu 48(%rdx),%xmm14 vpclmulqdq $0x10,(%r10),%xmm10,%xmm10 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 96-64(%rsi),%xmm6 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 128-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 32(%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 vpxor %xmm3,%xmm0,%xmm0 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2 vpxor %xmm15,%xmm8,%xmm8 vpxor %xmm5,%xmm2,%xmm2 vxorps %xmm12,%xmm10,%xmm10 vmovdqu 16(%rdx),%xmm14 vpalignr $8,%xmm10,%xmm10,%xmm12 vpclmulqdq 
$0x00,%xmm6,%xmm15,%xmm3 vpshufb %xmm13,%xmm14,%xmm14 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4 vmovdqu 144-64(%rsi),%xmm6 vpclmulqdq $0x10,(%r10),%xmm10,%xmm10 vxorps %xmm11,%xmm12,%xmm12 vpunpckhqdq %xmm14,%xmm14,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5 vmovdqu 176-64(%rsi),%xmm7 vpxor %xmm14,%xmm9,%xmm9 vpxor %xmm2,%xmm5,%xmm5 vmovdqu (%rdx),%xmm15 vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0 vpshufb %xmm13,%xmm15,%xmm15 vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1 vmovdqu 160-64(%rsi),%xmm6 vpxor %xmm12,%xmm15,%xmm15 vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2 vpxor %xmm10,%xmm15,%xmm15 leaq 128(%rdx),%rdx subq $0x80,%rcx jnc .Loop8x_avx addq $0x80,%rcx jmp .Ltail_no_xor_avx .align 32 .Lshort_avx: vmovdqu -16(%rdx,%rcx,1),%xmm14 leaq (%rdx,%rcx,1),%rdx vmovdqu 0-64(%rsi),%xmm6 vmovdqu 32-64(%rsi),%xmm7 vpshufb %xmm13,%xmm14,%xmm15 vmovdqa %xmm0,%xmm3 vmovdqa %xmm1,%xmm4 vmovdqa %xmm2,%xmm5 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -32(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 16-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -48(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 48-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu 80-64(%rsi),%xmm7 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -64(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 64-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -80(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 96-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu 128-64(%rsi),%xmm7 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -96(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 112-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vpsrldq $8,%xmm7,%xmm7 subq $0x10,%rcx jz .Ltail_avx vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vmovdqu -112(%rdx),%xmm14 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vmovdqu 144-64(%rsi),%xmm6 vpshufb %xmm13,%xmm14,%xmm15 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovq 184-64(%rsi),%xmm7 subq $0x10,%rcx jmp .Ltail_avx .align 32 .Ltail_avx: vpxor %xmm10,%xmm15,%xmm15 .Ltail_no_xor_avx: vpunpckhqdq %xmm15,%xmm15,%xmm8 vpxor %xmm0,%xmm3,%xmm3 vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0 vpxor %xmm15,%xmm8,%xmm8 vpxor %xmm1,%xmm4,%xmm4 vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1 vpxor %xmm2,%xmm5,%xmm5 vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2 vmovdqu (%r10),%xmm12 vpxor %xmm0,%xmm3,%xmm10 vpxor %xmm1,%xmm4,%xmm11 
vpxor %xmm2,%xmm5,%xmm5 vpxor %xmm10,%xmm5,%xmm5 vpxor %xmm11,%xmm5,%xmm5 vpslldq $8,%xmm5,%xmm9 vpsrldq $8,%xmm5,%xmm5 vpxor %xmm9,%xmm10,%xmm10 vpxor %xmm5,%xmm11,%xmm11 vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9 vpalignr $8,%xmm10,%xmm10,%xmm10 vpxor %xmm9,%xmm10,%xmm10 vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9 vpalignr $8,%xmm10,%xmm10,%xmm10 vpxor %xmm11,%xmm10,%xmm10 vpxor %xmm9,%xmm10,%xmm10 cmpq $0,%rcx jne .Lshort_avx vpshufb %xmm13,%xmm10,%xmm10 vmovdqu %xmm10,(%rdi) vzeroupper .byte 0xf3,0xc3 .cfi_endproc .size gcm_ghash_avx,.-gcm_ghash_avx .section .rodata .align 64 .Lbswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 .L0x1c2_polynomial: .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 .L7_mask: .long 7,0,7,0 .align 64 .byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .text #endif
marvin-hansen/iggy-streaming-system
3,299,348
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/aesni-gcm-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl gcm_init_avx512 .hidden gcm_init_avx512 .hidden gcm_init_avx512 .type gcm_init_avx512,@function .align 32 gcm_init_avx512: .cfi_startproc .byte 243,15,30,250 vmovdqu64 (%rsi),%xmm16 vpalignr $8,%xmm16,%xmm16,%xmm16 vmovdqa64 %xmm16,%xmm2 vpsllq $1,%xmm16,%xmm16 vpsrlq $63,%xmm2,%xmm2 vmovdqa %xmm2,%xmm1 vpslldq $8,%xmm2,%xmm2 vpsrldq $8,%xmm1,%xmm1 vporq %xmm2,%xmm16,%xmm16 vpshufd $36,%xmm1,%xmm2 vpcmpeqd TWOONE(%rip),%xmm2,%xmm2 vpand POLY(%rip),%xmm2,%xmm2 vpxorq %xmm2,%xmm16,%xmm16 vmovdqu64 %xmm16,240(%rdi) vshufi32x4 $0x00,%ymm16,%ymm16,%ymm4 vmovdqa %ymm4,%ymm3 .byte 98,243,101,40,68,196,17 .byte 98,243,101,40,68,204,0 .byte 98,243,101,40,68,212,1 .byte 98,243,101,40,68,220,16 vpxorq %ymm2,%ymm3,%ymm3 vpsrldq $8,%ymm3,%ymm2 vpslldq $8,%ymm3,%ymm3 vpxorq %ymm2,%ymm0,%ymm0 vpxorq %ymm1,%ymm3,%ymm3 vmovdqu64 POLY2(%rip),%ymm2 .byte 98,243,109,40,68,203,1 vpslldq $8,%ymm1,%ymm1 vpxorq %ymm1,%ymm3,%ymm3 .byte 98,243,109,40,68,203,0 vpsrldq $4,%ymm1,%ymm1 .byte 98,243,109,40,68,219,16 vpslldq $4,%ymm3,%ymm3 vpternlogq $0x96,%ymm1,%ymm0,%ymm3 vmovdqu64 %xmm3,224(%rdi) vinserti64x2 $1,%xmm16,%ymm3,%ymm4 vmovdqa64 %ymm4,%ymm5 .byte 98,243,93,40,68,195,17 .byte 98,243,93,40,68,203,0 .byte 98,243,93,40,68,211,1 .byte 98,243,93,40,68,227,16 vpxorq %ymm2,%ymm4,%ymm4 vpsrldq $8,%ymm4,%ymm2 vpslldq $8,%ymm4,%ymm4 vpxorq %ymm2,%ymm0,%ymm0 vpxorq %ymm1,%ymm4,%ymm4 vmovdqu64 POLY2(%rip),%ymm2 .byte 98,243,109,40,68,204,1 vpslldq $8,%ymm1,%ymm1 vpxorq %ymm1,%ymm4,%ymm4 .byte 98,243,109,40,68,204,0 vpsrldq $4,%ymm1,%ymm1 .byte 98,243,109,40,68,228,16 vpslldq $4,%ymm4,%ymm4 vpternlogq $0x96,%ymm1,%ymm0,%ymm4 vmovdqu64 %ymm4,192(%rdi) vinserti64x4 $1,%ymm5,%zmm4,%zmm4 vshufi64x2 $0x00,%zmm4,%zmm4,%zmm3 vmovdqa64 %zmm4,%zmm5 .byte 98,243,93,72,68,195,17 .byte 98,243,93,72,68,203,0 .byte 98,243,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm2,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm2 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,204,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm4,%zmm4 .byte 98,243,109,72,68,204,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm1,%zmm0,%zmm4 vmovdqu64 %zmm4,128(%rdi) vshufi64x2 $0x00,%zmm4,%zmm4,%zmm3 .byte 98,243,85,72,68,195,17 .byte 98,243,85,72,68,203,0 .byte 98,243,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm2,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm2 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,205,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm5,%zmm5 .byte 98,243,109,72,68,205,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm1,%zmm0,%zmm5 vmovdqu64 %zmm5,64(%rdi) .byte 98,243,93,72,68,195,17 .byte 98,243,93,72,68,203,0 .byte 98,243,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm2,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm2 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm2,%zmm0,%zmm0 vpxorq %zmm1,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm2 .byte 98,243,109,72,68,204,1 vpslldq $8,%zmm1,%zmm1 vpxorq %zmm1,%zmm4,%zmm4 .byte 98,243,109,72,68,204,0 vpsrldq $4,%zmm1,%zmm1 .byte 98,243,109,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm1,%zmm0,%zmm4 vmovdqu64 
%zmm4,0(%rdi) vzeroupper .Lexit_init: .byte 0xf3,0xc3 .cfi_endproc .size gcm_init_avx512, .-gcm_init_avx512 .globl gcm_gmult_avx512 .hidden gcm_gmult_avx512 .hidden gcm_gmult_avx512 .type gcm_gmult_avx512,@function .align 32 gcm_gmult_avx512: .cfi_startproc .byte 243,15,30,250 vmovdqu64 (%rdi),%xmm1 vpshufb SHUF_MASK(%rip),%xmm1,%xmm1 vmovdqu64 240(%rsi),%xmm2 .byte 98,243,117,8,68,218,17 .byte 98,243,117,8,68,226,0 .byte 98,243,117,8,68,234,1 .byte 98,243,117,8,68,202,16 vpxorq %xmm5,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm5 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm5,%xmm3,%xmm3 vpxorq %xmm4,%xmm1,%xmm1 vmovdqu64 POLY2(%rip),%xmm5 .byte 98,243,85,8,68,225,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm1,%xmm1 .byte 98,243,85,8,68,225,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,243,85,8,68,201,16 vpslldq $4,%xmm1,%xmm1 vpternlogq $0x96,%xmm4,%xmm3,%xmm1 vpshufb SHUF_MASK(%rip),%xmm1,%xmm1 vmovdqu64 %xmm1,(%rdi) vzeroupper .Lexit_gmult: .byte 0xf3,0xc3 .cfi_endproc .size gcm_gmult_avx512, .-gcm_gmult_avx512 .globl gcm_ghash_avx512 .hidden gcm_ghash_avx512 .hidden gcm_ghash_avx512 .type gcm_ghash_avx512,@function .align 32 gcm_ghash_avx512: .cfi_startproc .Lghash_seh_begin: .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 .Lghash_seh_push_rbx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 .Lghash_seh_push_rbp: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .Lghash_seh_push_r12: pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .Lghash_seh_push_r13: pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .Lghash_seh_push_r14: pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lghash_seh_push_r15: leaq 0(%rsp),%rbp .cfi_def_cfa_register %rbp .Lghash_seh_setfp: .Lghash_seh_prolog_end: subq $820,%rsp andq $(-64),%rsp vmovdqu64 (%rdi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movq %rdx,%r10 movq %rcx,%r11 orq %r11,%r11 jz .L_CALC_AAD_done_hEgxyDlCngwrfFe xorq %rbx,%rbx vmovdqa64 SHUF_MASK(%rip),%zmm16 .L_get_AAD_loop48x16_hEgxyDlCngwrfFe: cmpq $768,%r11 jl .L_exit_AAD_loop48x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz .L_skip_hkeys_precomputation_amivrujEyduiFoi vmovdqu64 192(%rsi),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%rsi),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%rsi),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%rsi),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq 
$4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,192(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,128(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,64(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,0(%rsp) .L_skip_hkeys_precomputation_amivrujEyduiFoi: movq $1,%rbx vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 
98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 512(%r10),%zmm11 vmovdqu64 576(%r10),%zmm3 vmovdqu64 640(%r10),%zmm4 vmovdqu64 704(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $768,%r11 je .L_CALC_AAD_done_hEgxyDlCngwrfFe addq $768,%r10 jmp .L_get_AAD_loop48x16_hEgxyDlCngwrfFe .L_exit_AAD_loop48x16_hEgxyDlCngwrfFe: cmpq $512,%r11 jl .L_less_than_32x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 
vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz .L_skip_hkeys_precomputation_wcpqaDvsGlbjGoe vmovdqu64 192(%rsi),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%rsi),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%rsi),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%rsi),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) .L_skip_hkeys_precomputation_wcpqaDvsGlbjGoe: movq $1,%rbx vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 
448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $512,%r11 je .L_CALC_AAD_done_hEgxyDlCngwrfFe addq $512,%r10 jmp .L_less_than_16x16_hEgxyDlCngwrfFe .L_less_than_32x16_hEgxyDlCngwrfFe: cmpq $256,%r11 jl .L_less_than_16x16_hEgxyDlCngwrfFe vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsi),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsi),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsi),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsi),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,115,45,8,68,241,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm6,%xmm9,%xmm14 subq $256,%r11 je .L_CALC_AAD_done_hEgxyDlCngwrfFe addq $256,%r10 .L_less_than_16x16_hEgxyDlCngwrfFe: leaq byte64_len_to_mask_table(%rip),%r12 leaq (%r12,%r11,8),%r12 addl $15,%r11d shrl $4,%r11d cmpl $2,%r11d jb 
.L_AAD_blocks_1_hEgxyDlCngwrfFe je .L_AAD_blocks_2_hEgxyDlCngwrfFe cmpl $4,%r11d jb .L_AAD_blocks_3_hEgxyDlCngwrfFe je .L_AAD_blocks_4_hEgxyDlCngwrfFe cmpl $6,%r11d jb .L_AAD_blocks_5_hEgxyDlCngwrfFe je .L_AAD_blocks_6_hEgxyDlCngwrfFe cmpl $8,%r11d jb .L_AAD_blocks_7_hEgxyDlCngwrfFe je .L_AAD_blocks_8_hEgxyDlCngwrfFe cmpl $10,%r11d jb .L_AAD_blocks_9_hEgxyDlCngwrfFe je .L_AAD_blocks_10_hEgxyDlCngwrfFe cmpl $12,%r11d jb .L_AAD_blocks_11_hEgxyDlCngwrfFe je .L_AAD_blocks_12_hEgxyDlCngwrfFe cmpl $14,%r11d jb .L_AAD_blocks_13_hEgxyDlCngwrfFe je .L_AAD_blocks_14_hEgxyDlCngwrfFe cmpl $15,%r11d je .L_AAD_blocks_15_hEgxyDlCngwrfFe .L_AAD_blocks_16_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 0(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 64(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 128(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm9,%zmm11,%zmm1 vpternlogq $0x96,%zmm10,%zmm3,%zmm6 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm12,%zmm11,%zmm7 vpternlogq $0x96,%zmm13,%zmm3,%zmm8 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,85,72,68,207,17 .byte 98,83,85,72,68,215,0 .byte 98,83,85,72,68,231,1 .byte 98,83,85,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_15_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 16(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 80(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,85,72,68,255,1 .byte 98,83,85,72,68,199,16 .byte 98,211,85,72,68,207,17 .byte 98,211,85,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq 
%zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_14_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%ymm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %ymm16,%ymm5,%ymm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 32(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 96(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,85,40,68,255,1 .byte 98,83,85,40,68,199,16 .byte 98,211,85,40,68,207,17 .byte 98,211,85,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_13_hEgxyDlCngwrfFe: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%xmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %xmm16,%xmm5,%xmm5 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 48(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 112(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,85,8,68,255,1 .byte 98,83,85,8,68,199,16 .byte 98,211,85,8,68,207,17 .byte 98,211,85,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 
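# Each of the .L_AAD_blocks_<N> cases ends with the same tail: fold the four
# 128-bit lanes of the accumulators together, reduce the folded product with
# the POLY2 constant, and XOR the result into %xmm14, which appears to hold
# the running GHASH value for gcm_ghash_avx512.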
vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_12_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 64(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 128(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_11_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 80(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,93,72,68,255,1 .byte 98,83,93,72,68,199,16 .byte 98,211,93,72,68,207,17 .byte 98,211,93,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp 
.L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_10_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%ymm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %ymm16,%ymm4,%ymm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 96(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,93,40,68,255,1 .byte 98,83,93,40,68,199,16 .byte 98,211,93,40,68,207,17 .byte 98,211,93,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_9_hEgxyDlCngwrfFe: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%xmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %xmm16,%xmm4,%xmm4 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 112(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,93,8,68,255,1 .byte 98,83,93,8,68,199,16 .byte 98,211,93,8,68,207,17 .byte 98,211,93,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_8_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 128(%rsi),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq 
%zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_7_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 144(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,101,72,68,255,1 .byte 98,83,101,72,68,199,16 .byte 98,211,101,72,68,207,17 .byte 98,211,101,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_6_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%ymm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %ymm16,%ymm3,%ymm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 160(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,101,40,68,255,1 .byte 98,83,101,40,68,199,16 .byte 98,211,101,40,68,207,17 .byte 98,211,101,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_5_hEgxyDlCngwrfFe: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%xmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %xmm16,%xmm3,%xmm3 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 176(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 
240(%rsi),%xmm15 .byte 98,211,101,8,68,255,1 .byte 98,83,101,8,68,199,16 .byte 98,211,101,8,68,207,17 .byte 98,211,101,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_4_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 192(%rsi),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_3_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 208(%rsi),%ymm15 vinserti64x2 $2,240(%rsi),%zmm15,%zmm15 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_2_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%ymm11{%k1}{z} vpshufb %ymm16,%ymm11,%ymm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 224(%rsi),%ymm15 .byte 98,211,37,40,68,255,1 .byte 98,83,37,40,68,199,16 .byte 98,211,37,40,68,207,17 .byte 98,211,37,40,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm1,%xmm8,%xmm14 jmp .L_CALC_AAD_done_hEgxyDlCngwrfFe .L_AAD_blocks_1_hEgxyDlCngwrfFe: kmovq (%r12),%k1 vmovdqu8 0(%r10),%xmm11{%k1}{z} vpshufb %xmm16,%xmm11,%xmm11 vpxorq %zmm14,%zmm11,%zmm11 vmovdqu64 240(%rsi),%xmm15 .byte 98,211,37,8,68,255,1 .byte 98,83,37,8,68,199,16 .byte 98,211,37,8,68,207,17 .byte 98,211,37,8,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,115,5,8,68,247,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm1,%xmm8,%xmm14 .L_CALC_AAD_done_hEgxyDlCngwrfFe: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,(%rdi) cmpq $256,%rcx jbe .Lskip_hkeys_cleanup_EmbgEptodyewbFa vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) .Lskip_hkeys_cleanup_EmbgEptodyewbFa: vzeroupper leaq (%rbp),%rsp .cfi_def_cfa_register %rsp popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx .Lexit_ghash: .byte 0xf3,0xc3 .Lghash_seh_end: .cfi_endproc .size gcm_ghash_avx512, .-gcm_ghash_avx512 .globl gcm_setiv_avx512 .hidden gcm_setiv_avx512 .hidden gcm_setiv_avx512 .type gcm_setiv_avx512,@function .align 32 gcm_setiv_avx512: .cfi_startproc .Lsetiv_seh_begin: .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 .Lsetiv_seh_push_rbx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 .Lsetiv_seh_push_rbp: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .Lsetiv_seh_push_r12: pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .Lsetiv_seh_push_r13: pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .Lsetiv_seh_push_r14: pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lsetiv_seh_push_r15: leaq 0(%rsp),%rbp .cfi_def_cfa_register %rbp .Lsetiv_seh_setfp: .Lsetiv_seh_prolog_end: subq $820,%rsp andq $(-64),%rsp cmpq $12,%rcx je iv_len_12_init_IV vpxor %xmm2,%xmm2,%xmm2 leaq 80(%rsi),%r13 movq %rdx,%r10 movq %rcx,%r11 orq %r11,%r11 jz .L_CALC_AAD_done_bnzFsuvmDknpsbp xorq %rbx,%rbx vmovdqa64 SHUF_MASK(%rip),%zmm16 .L_get_AAD_loop48x16_bnzFsuvmDknpsbp: cmpq $768,%r11 jl .L_exit_AAD_loop48x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz .L_skip_hkeys_precomputation_dBmbyqhifbmbobw vmovdqu64 192(%r13),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%r13),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%r13),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%r13),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 
98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,192(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,128(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq 
$4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,64(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,0(%rsp) .L_skip_hkeys_precomputation_dBmbyqhifbmbobw: movq $1,%rbx vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 512(%r10),%zmm11 vmovdqu64 576(%r10),%zmm3 vmovdqu64 640(%r10),%zmm4 vmovdqu64 704(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 
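# vpternlogq with immediate 0x96 computes a three-way XOR (dst = A ^ B ^ C);
# the runs of it below fold the partial products of the carry-less multiplies
# above into the running GHASH accumulators ahead of the final reduction
# against POLY2.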
vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $768,%r11 je .L_CALC_AAD_done_bnzFsuvmDknpsbp addq $768,%r10 jmp .L_get_AAD_loop48x16_bnzFsuvmDknpsbp .L_exit_AAD_loop48x16_bnzFsuvmDknpsbp: cmpq $512,%r11 jl .L_less_than_32x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 testq %rbx,%rbx jnz .L_skip_hkeys_precomputation_javBbvtBBkicjdB vmovdqu64 192(%r13),%zmm1 vmovdqu64 %zmm1,704(%rsp) vmovdqu64 128(%r13),%zmm9 vmovdqu64 %zmm9,640(%rsp) vshufi64x2 $0x00,%zmm9,%zmm9,%zmm9 vmovdqu64 64(%r13),%zmm10 vmovdqu64 %zmm10,576(%rsp) vmovdqu64 0(%r13),%zmm12 vmovdqu64 %zmm12,512(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,448(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,384(%rsp) .byte 98,83,45,72,68,233,17 .byte 98,83,45,72,68,249,0 .byte 98,195,45,72,68,201,1 .byte 98,83,45,72,68,209,16 vpxorq %zmm17,%zmm10,%zmm10 vpsrldq $8,%zmm10,%zmm17 vpslldq $8,%zmm10,%zmm10 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm10,%zmm10 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,250,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm10,%zmm10 .byte 98,83,117,64,68,250,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,210,16 vpslldq $4,%zmm10,%zmm10 vpternlogq $0x96,%zmm15,%zmm13,%zmm10 vmovdqu64 %zmm10,320(%rsp) .byte 98,83,29,72,68,233,17 .byte 98,83,29,72,68,249,0 .byte 98,195,29,72,68,201,1 .byte 98,83,29,72,68,225,16 vpxorq %zmm17,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm17 vpslldq $8,%zmm12,%zmm12 vpxorq %zmm17,%zmm13,%zmm13 vpxorq %zmm15,%zmm12,%zmm12 vmovdqu64 POLY2(%rip),%zmm17 .byte 98,83,117,64,68,252,1 vpslldq $8,%zmm15,%zmm15 vpxorq %zmm15,%zmm12,%zmm12 .byte 98,83,117,64,68,252,0 vpsrldq $4,%zmm15,%zmm15 .byte 98,83,117,64,68,228,16 vpslldq $4,%zmm12,%zmm12 vpternlogq $0x96,%zmm15,%zmm13,%zmm12 vmovdqu64 %zmm12,256(%rsp) 
.L_skip_hkeys_precomputation_javBbvtBBkicjdB: movq $1,%rbx vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 256(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 320(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 384(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 448(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 256(%r10),%zmm11 vmovdqu64 320(%r10),%zmm3 vmovdqu64 384(%r10),%zmm4 vmovdqu64 448(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vmovdqu64 512(%rsp),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 576(%rsp),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 640(%rsp),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 704(%rsp),%zmm19 .byte 98,51,85,72,68,235,17 .byte 98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $512,%r11 je .L_CALC_AAD_done_bnzFsuvmDknpsbp addq $512,%r10 jmp .L_less_than_16x16_bnzFsuvmDknpsbp .L_less_than_32x16_bnzFsuvmDknpsbp: cmpq $256,%r11 jl .L_less_than_16x16_bnzFsuvmDknpsbp vmovdqu64 0(%r10),%zmm11 vmovdqu64 64(%r10),%zmm3 vmovdqu64 128(%r10),%zmm4 vmovdqu64 192(%r10),%zmm5 vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%r13),%zmm19 .byte 98,179,37,72,68,203,17 .byte 98,51,37,72,68,203,0 .byte 98,51,37,72,68,211,1 .byte 98,51,37,72,68,227,16 vmovdqu64 64(%r13),%zmm19 .byte 98,51,101,72,68,235,17 .byte 98,51,101,72,68,251,0 .byte 98,163,101,72,68,203,1 .byte 98,163,101,72,68,211,16 vpxorq %zmm17,%zmm10,%zmm7 vpxorq %zmm13,%zmm1,%zmm6 vpxorq %zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vmovdqu64 128(%r13),%zmm19 .byte 98,179,93,72,68,203,17 .byte 98,51,93,72,68,203,0 .byte 98,51,93,72,68,211,1 .byte 98,51,93,72,68,227,16 vmovdqu64 192(%r13),%zmm19 .byte 98,51,85,72,68,235,17 .byte 
98,51,85,72,68,251,0 .byte 98,163,85,72,68,203,1 .byte 98,163,85,72,68,211,16 vpternlogq $0x96,%zmm17,%zmm10,%zmm7 vpternlogq $0x96,%zmm13,%zmm1,%zmm6 vpternlogq $0x96,%zmm15,%zmm9,%zmm8 vpternlogq $0x96,%zmm18,%zmm12,%zmm7 vpsrldq $8,%zmm7,%zmm1 vpslldq $8,%zmm7,%zmm9 vpxorq %zmm1,%zmm6,%zmm6 vpxorq %zmm9,%zmm8,%zmm8 vextracti64x4 $1,%zmm6,%ymm1 vpxorq %ymm1,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm1 vpxorq %xmm1,%xmm6,%xmm6 vextracti64x4 $1,%zmm8,%ymm9 vpxorq %ymm9,%ymm8,%ymm8 vextracti32x4 $1,%ymm8,%xmm9 vpxorq %xmm9,%xmm8,%xmm8 vmovdqa64 POLY2(%rip),%xmm10 .byte 98,211,45,8,68,200,1 vpslldq $8,%xmm1,%xmm1 vpxorq %xmm1,%xmm8,%xmm1 .byte 98,115,45,8,68,201,0 vpsrldq $4,%xmm9,%xmm9 .byte 98,243,45,8,68,209,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm6,%xmm9,%xmm2 subq $256,%r11 je .L_CALC_AAD_done_bnzFsuvmDknpsbp addq $256,%r10 .L_less_than_16x16_bnzFsuvmDknpsbp: leaq byte64_len_to_mask_table(%rip),%r12 leaq (%r12,%r11,8),%r12 addl $15,%r11d shrl $4,%r11d cmpl $2,%r11d jb .L_AAD_blocks_1_bnzFsuvmDknpsbp je .L_AAD_blocks_2_bnzFsuvmDknpsbp cmpl $4,%r11d jb .L_AAD_blocks_3_bnzFsuvmDknpsbp je .L_AAD_blocks_4_bnzFsuvmDknpsbp cmpl $6,%r11d jb .L_AAD_blocks_5_bnzFsuvmDknpsbp je .L_AAD_blocks_6_bnzFsuvmDknpsbp cmpl $8,%r11d jb .L_AAD_blocks_7_bnzFsuvmDknpsbp je .L_AAD_blocks_8_bnzFsuvmDknpsbp cmpl $10,%r11d jb .L_AAD_blocks_9_bnzFsuvmDknpsbp je .L_AAD_blocks_10_bnzFsuvmDknpsbp cmpl $12,%r11d jb .L_AAD_blocks_11_bnzFsuvmDknpsbp je .L_AAD_blocks_12_bnzFsuvmDknpsbp cmpl $14,%r11d jb .L_AAD_blocks_13_bnzFsuvmDknpsbp je .L_AAD_blocks_14_bnzFsuvmDknpsbp cmpl $15,%r11d je .L_AAD_blocks_15_bnzFsuvmDknpsbp .L_AAD_blocks_16_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 0(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 64(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 128(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm9,%zmm11,%zmm1 vpternlogq $0x96,%zmm10,%zmm3,%zmm6 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm12,%zmm11,%zmm7 vpternlogq $0x96,%zmm13,%zmm3,%zmm8 vmovdqu64 192(%r13),%zmm15 .byte 98,83,85,72,68,207,17 .byte 98,83,85,72,68,215,0 .byte 98,83,85,72,68,231,1 .byte 98,83,85,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_15_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%zmm5{%k1}{z} vpshufb 
%zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %zmm16,%zmm5,%zmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 16(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 80(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 144(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,85,72,68,255,1 .byte 98,83,85,72,68,199,16 .byte 98,211,85,72,68,207,17 .byte 98,211,85,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_14_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%ymm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %ymm16,%ymm5,%ymm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 32(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 96(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 160(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 224(%r13),%ymm15 .byte 98,211,85,40,68,255,1 .byte 98,83,85,40,68,199,16 .byte 98,211,85,40,68,207,17 .byte 98,211,85,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_13_bnzFsuvmDknpsbp: subq $1536,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4 vmovdqu8 192(%r10),%xmm5{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb 
%zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpshufb %xmm16,%xmm5,%xmm5 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 48(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 112(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 176(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vmovdqu64 240(%r13),%xmm15 .byte 98,211,85,8,68,255,1 .byte 98,83,85,8,68,199,16 .byte 98,211,85,8,68,207,17 .byte 98,211,85,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_12_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 64(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 128(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vmovdqu64 192(%r13),%zmm15 .byte 98,83,93,72,68,223,17 .byte 98,211,93,72,68,223,0 vpternlogq $0x96,%zmm1,%zmm11,%zmm9 vpternlogq $0x96,%zmm6,%zmm3,%zmm10 .byte 98,83,93,72,68,223,1 .byte 98,211,93,72,68,223,16 vpternlogq $0x96,%zmm7,%zmm11,%zmm12 vpternlogq $0x96,%zmm8,%zmm3,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_11_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%zmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %zmm16,%zmm4,%zmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 80(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 144(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq 
%zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,93,72,68,255,1 .byte 98,83,93,72,68,199,16 .byte 98,211,93,72,68,207,17 .byte 98,211,93,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_10_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%ymm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %ymm16,%ymm4,%ymm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 96(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 160(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 224(%r13),%ymm15 .byte 98,211,93,40,68,255,1 .byte 98,83,93,40,68,199,16 .byte 98,211,93,40,68,207,17 .byte 98,211,93,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_9_bnzFsuvmDknpsbp: subq $1024,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3 vmovdqu8 128(%r10),%xmm4{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpshufb %xmm16,%xmm4,%xmm4 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 112(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 176(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vmovdqu64 240(%r13),%xmm15 .byte 98,211,93,8,68,255,1 .byte 98,83,93,8,68,199,16 .byte 98,211,93,8,68,207,17 .byte 98,211,93,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 
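# The extract/XOR steps here fold the 512-bit accumulator down to 128 bits
# (zmm -> ymm -> xmm) ahead of the POLY2-based reduction; in gcm_setiv_avx512
# the reduced value appears to be accumulated into %xmm2, the running GHASH of
# the IV that is used to derive the initial counter block when the IV is not
# 12 bytes long.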
vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_8_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 128(%r13),%zmm15 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 vmovdqu64 192(%r13),%zmm15 .byte 98,83,101,72,68,207,17 .byte 98,83,101,72,68,215,0 .byte 98,83,101,72,68,231,1 .byte 98,83,101,72,68,239,16 vpxorq %zmm9,%zmm1,%zmm9 vpxorq %zmm10,%zmm6,%zmm10 vpxorq %zmm12,%zmm7,%zmm12 vpxorq %zmm13,%zmm8,%zmm13 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_7_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%zmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %zmm16,%zmm3,%zmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 144(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,101,72,68,255,1 .byte 98,83,101,72,68,199,16 .byte 98,211,101,72,68,207,17 .byte 98,211,101,72,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_6_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%ymm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %ymm16,%ymm3,%ymm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 160(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 224(%r13),%ymm15 .byte 98,211,101,40,68,255,1 .byte 98,83,101,40,68,199,16 .byte 98,211,101,40,68,207,17 .byte 98,211,101,40,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq 
$8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_5_bnzFsuvmDknpsbp: subq $512,%r12 kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11 vmovdqu8 64(%r10),%xmm3{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpshufb %xmm16,%xmm3,%xmm3 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 176(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vmovdqu64 240(%r13),%xmm15 .byte 98,211,101,8,68,255,1 .byte 98,83,101,8,68,199,16 .byte 98,211,101,8,68,207,17 .byte 98,211,101,8,68,247,0 vpxorq %zmm12,%zmm7,%zmm7 vpxorq %zmm13,%zmm8,%zmm8 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_4_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 192(%r13),%zmm15 .byte 98,83,37,72,68,207,17 .byte 98,83,37,72,68,215,0 .byte 98,83,37,72,68,231,1 .byte 98,83,37,72,68,239,16 vpxorq %zmm13,%zmm12,%zmm12 vpsrldq $8,%zmm12,%zmm7 vpslldq $8,%zmm12,%zmm8 vpxorq %zmm7,%zmm9,%zmm1 vpxorq %zmm8,%zmm10,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_3_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%zmm11{%k1}{z} vpshufb %zmm16,%zmm11,%zmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 208(%r13),%ymm15 vinserti64x2 $2,240(%r13),%zmm15,%zmm15 .byte 98,211,37,72,68,255,1 .byte 98,83,37,72,68,199,16 .byte 98,211,37,72,68,207,17 .byte 98,211,37,72,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq 
$0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_2_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%ymm11{%k1}{z} vpshufb %ymm16,%ymm11,%ymm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 224(%r13),%ymm15 .byte 98,211,37,40,68,255,1 .byte 98,83,37,40,68,199,16 .byte 98,211,37,40,68,207,17 .byte 98,211,37,40,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 jmp .L_CALC_AAD_done_bnzFsuvmDknpsbp .L_AAD_blocks_1_bnzFsuvmDknpsbp: kmovq (%r12),%k1 vmovdqu8 0(%r10),%xmm11{%k1}{z} vpshufb %xmm16,%xmm11,%xmm11 vpxorq %zmm2,%zmm11,%zmm11 vmovdqu64 240(%r13),%xmm15 .byte 98,211,37,8,68,255,1 .byte 98,83,37,8,68,199,16 .byte 98,211,37,8,68,207,17 .byte 98,211,37,8,68,247,0 vpxorq %zmm8,%zmm7,%zmm7 vpsrldq $8,%zmm7,%zmm12 vpslldq $8,%zmm7,%zmm13 vpxorq %zmm12,%zmm1,%zmm1 vpxorq %zmm13,%zmm6,%zmm6 vextracti64x4 $1,%zmm1,%ymm12 vpxorq %ymm12,%ymm1,%ymm1 vextracti32x4 $1,%ymm1,%xmm12 vpxorq %xmm12,%xmm1,%xmm1 vextracti64x4 $1,%zmm6,%ymm13 vpxorq %ymm13,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm13 vpxorq %xmm13,%xmm6,%xmm6 vmovdqa64 POLY2(%rip),%xmm15 .byte 98,243,5,8,68,254,1 vpslldq $8,%xmm7,%xmm7 vpxorq %xmm7,%xmm6,%xmm7 .byte 98,115,5,8,68,199,0 vpsrldq $4,%xmm8,%xmm8 .byte 98,243,5,8,68,215,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm1,%xmm8,%xmm2 .L_CALC_AAD_done_bnzFsuvmDknpsbp: movq %rcx,%r10 shlq $3,%r10 vmovq %r10,%xmm3 vpxorq %xmm2,%xmm3,%xmm2 vmovdqu64 240(%r13),%xmm1 .byte 98,115,109,8,68,217,17 .byte 98,243,109,8,68,217,0 .byte 98,243,109,8,68,225,1 .byte 98,243,109,8,68,209,16 vpxorq %xmm4,%xmm2,%xmm2 vpsrldq $8,%xmm2,%xmm4 vpslldq $8,%xmm2,%xmm2 vpxorq %xmm4,%xmm11,%xmm11 vpxorq %xmm3,%xmm2,%xmm2 vmovdqu64 POLY2(%rip),%xmm4 .byte 98,243,93,8,68,218,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm2,%xmm2 .byte 98,243,93,8,68,218,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,243,93,8,68,210,16 vpslldq $4,%xmm2,%xmm2 vpternlogq $0x96,%xmm3,%xmm11,%xmm2 vpshufb SHUF_MASK(%rip),%xmm2,%xmm2 jmp skip_iv_len_12_init_IV iv_len_12_init_IV: vmovdqu8 ONEf(%rip),%xmm2 movq %rdx,%r11 movl $0x0000000000000fff,%r10d kmovq %r10,%k1 vmovdqu8 (%r11),%xmm2{%k1} skip_iv_len_12_init_IV: vmovdqu %xmm2,%xmm1 movl 240(%rdi),%r10d cmpl $9,%r10d je .Laes_128_otBvnbdyuroewzD cmpl $11,%r10d je .Laes_192_otBvnbdyuroewzD cmpl $13,%r10d je .Laes_256_otBvnbdyuroewzD jmp .Lexit_aes_otBvnbdyuroewzD .align 32 .Laes_128_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 98,242,117,8,221,79,10 jmp .Lexit_aes_otBvnbdyuroewzD .align 32 .Laes_192_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 
98,242,117,8,220,79,10 .byte 98,242,117,8,220,79,11 .byte 98,242,117,8,221,79,12 jmp .Lexit_aes_otBvnbdyuroewzD .align 32 .Laes_256_otBvnbdyuroewzD: vpxorq 0(%rdi),%xmm1,%xmm1 .byte 98,242,117,8,220,79,1 .byte 98,242,117,8,220,79,2 .byte 98,242,117,8,220,79,3 .byte 98,242,117,8,220,79,4 .byte 98,242,117,8,220,79,5 .byte 98,242,117,8,220,79,6 .byte 98,242,117,8,220,79,7 .byte 98,242,117,8,220,79,8 .byte 98,242,117,8,220,79,9 .byte 98,242,117,8,220,79,10 .byte 98,242,117,8,220,79,11 .byte 98,242,117,8,220,79,12 .byte 98,242,117,8,220,79,13 .byte 98,242,117,8,221,79,14 jmp .Lexit_aes_otBvnbdyuroewzD .Lexit_aes_otBvnbdyuroewzD: vmovdqu %xmm1,32(%rsi) vpshufb SHUF_MASK(%rip),%xmm2,%xmm2 vmovdqu %xmm2,0(%rsi) .Lexit_setiv: cmpq $256,%rcx jbe .Lskip_hkeys_cleanup_lDGzdqCkvgheosr vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) .Lskip_hkeys_cleanup_lDGzdqCkvgheosr: vzeroupper leaq (%rbp),%rsp .cfi_def_cfa_register %rsp popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx .byte 0xf3,0xc3 .Lsetiv_seh_end: .cfi_endproc .size gcm_setiv_avx512, .-gcm_setiv_avx512 .globl aes_gcm_encrypt_avx512 .hidden aes_gcm_encrypt_avx512 .hidden aes_gcm_encrypt_avx512 .type aes_gcm_encrypt_avx512,@function .align 32 aes_gcm_encrypt_avx512: .cfi_startproc .Lencrypt_seh_begin: #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+7(%rip) #endif .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 .Lencrypt_seh_push_rbx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 .Lencrypt_seh_push_rbp: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .Lencrypt_seh_push_r12: pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .Lencrypt_seh_push_r13: pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .Lencrypt_seh_push_r14: pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lencrypt_seh_push_r15: leaq 0(%rsp),%rbp .cfi_def_cfa_register %rbp .Lencrypt_seh_setfp: .Lencrypt_seh_prolog_end: subq $1588,%rsp andq $(-64),%rsp movl 240(%rdi),%eax cmpl $9,%eax je .Laes_gcm_encrypt_128_avx512 cmpl $11,%eax je .Laes_gcm_encrypt_192_avx512 cmpl $13,%eax je .Laes_gcm_encrypt_256_avx512 xorl %eax,%eax jmp .Lexit_gcm_encrypt .align 32 .Laes_gcm_encrypt_128_avx512: orq %r8,%r8 je .L_enc_dec_abort_pzwgkGgbplFqzaB xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_FkezCgctzlCoEyh movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_FkezCgctzlCoEyh subq %r13,%r12 .L_no_extra_mask_FkezCgctzlCoEyh: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb 
SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_FkezCgctzlCoEyh .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_FkezCgctzlCoEyh .L_partial_incomplete_FkezCgctzlCoEyh: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_FkezCgctzlCoEyh: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_FkezCgctzlCoEyh: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_pzwgkGgbplFqzaB cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_pzwgkGgbplFqzaB vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_yByFrylbFDFnFCp vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_yByFrylbFDFnFCp .L_next_16_overflow_yByFrylbFDFnFCp: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_yByFrylbFDFnFCp: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 
98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_achfkmnqFwjgbDD vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_achfkmnqFwjgbDD: cmpq $512,%r8 jb .L_message_below_32_blocks_pzwgkGgbplFqzaB cmpb $240,%r15b jae .L_next_16_overflow_xvcFynjeulFjDdF vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_xvcFynjeulFjDdF .L_next_16_overflow_xvcFynjeulFjDdF: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_xvcFynjeulFjDdF: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 
vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_mBcrmCyGfEttetw vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq 
%zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_mBcrmCyGfEttetw: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_pzwgkGgbplFqzaB .L_encrypt_big_nblocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae .L_16_blocks_overflow_avoAfAGuxmumDjA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_avoAfAGuxmumDjA .L_16_blocks_overflow_avoAfAGuxmumDjA: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_avoAfAGuxmumDjA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_AGgjmjawDklDqyq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_AGgjmjawDklDqyq .L_16_blocks_overflow_AGgjmjawDklDqyq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_AGgjmjawDklDqyq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_gutvpupplrsoEbw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_gutvpupplrsoEbw .L_16_blocks_overflow_gutvpupplrsoEbw: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_gutvpupplrsoEbw: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 
640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_pzwgkGgbplFqzaB .L_no_more_big_nblocks_pzwgkGgbplFqzaB: cmpq $512,%r8 jae .L_encrypt_32_blocks_pzwgkGgbplFqzaB cmpq $256,%r8 jae .L_encrypt_16_blocks_pzwgkGgbplFqzaB .L_encrypt_0_blocks_ghash_32_pzwgkGgbplFqzaB: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl 
$4,%r10d je .L_last_num_blocks_is_0_BdcphecxdpdFEsb cmpl $8,%r10d je .L_last_num_blocks_is_8_BdcphecxdpdFEsb jb .L_last_num_blocks_is_7_1_BdcphecxdpdFEsb cmpl $12,%r10d je .L_last_num_blocks_is_12_BdcphecxdpdFEsb jb .L_last_num_blocks_is_11_9_BdcphecxdpdFEsb cmpl $15,%r10d je .L_last_num_blocks_is_15_BdcphecxdpdFEsb ja .L_last_num_blocks_is_16_BdcphecxdpdFEsb cmpl $14,%r10d je .L_last_num_blocks_is_14_BdcphecxdpdFEsb jmp .L_last_num_blocks_is_13_BdcphecxdpdFEsb .L_last_num_blocks_is_11_9_BdcphecxdpdFEsb: cmpl $10,%r10d je .L_last_num_blocks_is_10_BdcphecxdpdFEsb ja .L_last_num_blocks_is_11_BdcphecxdpdFEsb jmp .L_last_num_blocks_is_9_BdcphecxdpdFEsb .L_last_num_blocks_is_7_1_BdcphecxdpdFEsb: cmpl $4,%r10d je .L_last_num_blocks_is_4_BdcphecxdpdFEsb jb .L_last_num_blocks_is_3_1_BdcphecxdpdFEsb cmpl $6,%r10d ja .L_last_num_blocks_is_7_BdcphecxdpdFEsb je .L_last_num_blocks_is_6_BdcphecxdpdFEsb jmp .L_last_num_blocks_is_5_BdcphecxdpdFEsb .L_last_num_blocks_is_3_1_BdcphecxdpdFEsb: cmpl $2,%r10d ja .L_last_num_blocks_is_3_BdcphecxdpdFEsb je .L_last_num_blocks_is_2_BdcphecxdpdFEsb .L_last_num_blocks_is_1_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_azzgqhumkfnyDqm vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_azzgqhumkfnyDqm .L_16_blocks_overflow_azzgqhumkfnyDqm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_azzgqhumkfnyDqm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_dnmqhGDjDpgnine subq $16,%r8 movl $0,(%rdx) vmovdqu64 
240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dnmqhGDjDpgnine .L_small_initial_partial_block_dnmqhGDjDpgnine: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_dnmqhGDjDpgnine .L_small_initial_compute_done_dnmqhGDjDpgnine: .L_after_reduction_dnmqhGDjDpgnine: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_2_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_yekhBCebufcAiFh vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_yekhBCebufcAiFh .L_16_blocks_overflow_yekhBCebufcAiFh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_yekhBCebufcAiFh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jwyvkjdvesmxGpv subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jwyvkjdvesmxGpv .L_small_initial_partial_block_jwyvkjdvesmxGpv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jwyvkjdvesmxGpv: orq %r8,%r8 je .L_after_reduction_jwyvkjdvesmxGpv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jwyvkjdvesmxGpv: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_3_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_usjywjwllaabozc vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_usjywjwllaabozc .L_16_blocks_overflow_usjywjwllaabozc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_usjywjwllaabozc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 
.byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lmkDAitgFzCCoEA subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lmkDAitgFzCCoEA .L_small_initial_partial_block_lmkDAitgFzCCoEA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lmkDAitgFzCCoEA: orq %r8,%r8 je .L_after_reduction_lmkDAitgFzCCoEA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lmkDAitgFzCCoEA: jmp .L_last_blocks_done_BdcphecxdpdFEsb 
.L_last_num_blocks_is_4_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_xobkzaAwcplaFgb vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_xobkzaAwcplaFgb .L_16_blocks_overflow_xobkzaAwcplaFgb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_xobkzaAwcplaFgb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_genGClghdbzBqhw subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_genGClghdbzBqhw .L_small_initial_partial_block_genGClghdbzBqhw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 
98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_genGClghdbzBqhw: orq %r8,%r8 je .L_after_reduction_genGClghdbzBqhw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_genGClghdbzBqhw: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_5_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_bpsqdGAhjeggABn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_bpsqdGAhjeggABn .L_16_blocks_overflow_bpsqdGAhjeggABn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_bpsqdGAhjeggABn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 
98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wdqrtGpojajFBea subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wdqrtGpojajFBea .L_small_initial_partial_block_wdqrtGpojajFBea: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wdqrtGpojajFBea: orq %r8,%r8 je .L_after_reduction_wdqrtGpojajFBea vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wdqrtGpojajFBea: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_6_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_qmgDCpkysmqcgnB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_qmgDCpkysmqcgnB .L_16_blocks_overflow_qmgDCpkysmqcgnB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_qmgDCpkysmqcgnB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 
98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GvjnkpjsgDafsun subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GvjnkpjsgDafsun .L_small_initial_partial_block_GvjnkpjsgDafsun: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 
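/* The vpternlogq $0x96 steps below are three-way XORs that merge the GHASH
   partial products; the 512-bit accumulators are then folded down to 128 bits
   with byte shifts and lane extracts and reduced against the POLY2 constant
   using carry-less multiplies (the vpclmulqdq forms are emitted as .byte,
   presumably for older assemblers).  The running hash ends up in %xmm14. */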
vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GvjnkpjsgDafsun: orq %r8,%r8 je .L_after_reduction_GvjnkpjsgDafsun vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GvjnkpjsgDafsun: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_7_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_jaFyvjvpAfzmwyg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_jaFyvjvpAfzmwyg .L_16_blocks_overflow_jaFyvjvpAfzmwyg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_jaFyvjvpAfzmwyg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 
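/* Below: XOR the keystream with the loaded input blocks, write the result out
   through the %k1 byte mask, and byte-reflect the output with the shuffle
   mask in %zmm29; the reflected blocks (%zmm17/%zmm19, last one extracted to
   %xmm7) feed the GHASH update that follows. */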
vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iuyAGoBcDewEeiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iuyAGoBcDewEeiy .L_small_initial_partial_block_iuyAGoBcDewEeiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iuyAGoBcDewEeiy: orq %r8,%r8 je .L_after_reduction_iuyAGoBcDewEeiy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iuyAGoBcDewEeiy: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_8_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_FbwsrgpDGDmccid vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_FbwsrgpDGDmccid .L_16_blocks_overflow_FbwsrgpDGDmccid: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_FbwsrgpDGDmccid: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 
1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lABtdkpvoGeFpzp subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lABtdkpvoGeFpzp .L_small_initial_partial_block_lABtdkpvoGeFpzp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 
98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lABtdkpvoGeFpzp: orq %r8,%r8 je .L_after_reduction_lABtdkpvoGeFpzp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lABtdkpvoGeFpzp: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_9_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_dtxuExFwmpsGEiG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_dtxuExFwmpsGEiG .L_16_blocks_overflow_dtxuExFwmpsGEiG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_dtxuExFwmpsGEiG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 
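/* Below: the remaining AES rounds for this tail path (EVEX vaesenc and the
   final vaesenclast, encoded as .byte); the GHASH partial products computed
   above from the stack-buffered blocks are gathered along the way with
   vpternlogq $0x96 three-way XORs. */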
.byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vkADoeFsfDwilnv subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vkADoeFsfDwilnv .L_small_initial_partial_block_vkADoeFsfDwilnv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vkADoeFsfDwilnv: orq %r8,%r8 je .L_after_reduction_vkADoeFsfDwilnv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vkADoeFsfDwilnv: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_10_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_damgrhyFxffganz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_damgrhyFxffganz .L_16_blocks_overflow_damgrhyFxffganz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_damgrhyFxffganz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 
%zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iankhgrgFnoiAgG subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iankhgrgFnoiAgG .L_small_initial_partial_block_iankhgrgFnoiAgG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iankhgrgFnoiAgG: orq %r8,%r8 je .L_after_reduction_iankhgrgFnoiAgG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iankhgrgFnoiAgG: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_11_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_gnGEkpgDpmugvpk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_gnGEkpgDpmugvpk .L_16_blocks_overflow_gnGEkpgDpmugvpk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_gnGEkpgDpmugvpk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sECkucceDhaBnCk subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sECkucceDhaBnCk .L_small_initial_partial_block_sECkucceDhaBnCk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sECkucceDhaBnCk: orq %r8,%r8 je .L_after_reduction_sECkucceDhaBnCk vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sECkucceDhaBnCk: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_12_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_qkecuzhoaAuxmmC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_qkecuzhoaAuxmmC .L_16_blocks_overflow_qkecuzhoaAuxmmC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_qkecuzhoaAuxmmC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
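/* Below: load the next group of previously buffered blocks from
   (%rsp,%rbx,1) together with the cached hash-key powers at 1152/1216(%rsp),
   and continue the AES rounds for the three counter-block groups of this
   12-block tail case. */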
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GEFnxzpzjbtbhxx subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GEFnxzpzjbtbhxx .L_small_initial_partial_block_GEFnxzpzjbtbhxx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GEFnxzpzjbtbhxx: orq %r8,%r8 je .L_after_reduction_GEFnxzpzjbtbhxx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GEFnxzpzjbtbhxx: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_13_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_BjhkFcriuCnuFez vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_BjhkFcriuCnuFez .L_16_blocks_overflow_BjhkFcriuCnuFez: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_BjhkFcriuCnuFez: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 
.byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jyxtluvpAmFhjFk subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
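/* Below: second half of the final reduction: two more carry-less multiplies
   against POLY2 plus 4-byte shifts fold the result into the running GHASH
   value in %xmm14. */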
.byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jyxtluvpAmFhjFk .L_small_initial_partial_block_jyxtluvpAmFhjFk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jyxtluvpAmFhjFk: orq %r8,%r8 je .L_after_reduction_jyxtluvpAmFhjFk vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jyxtluvpAmFhjFk: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_14_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_kGBwgppdvolmGmc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_kGBwgppdvolmGmc .L_16_blocks_overflow_kGBwgppdvolmGmc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_kGBwgppdvolmGmc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AxanimCshomfwbg subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 
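/* Below: last multiply of the reduction; the result is merged into %xmm14 and
   control joins the common "compute done" path, which XORs in the reflected
   partial-block value from %xmm7 only when trailing bytes remain (%r8 != 0). */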
.byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AxanimCshomfwbg .L_small_initial_partial_block_AxanimCshomfwbg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AxanimCshomfwbg: orq %r8,%r8 je .L_after_reduction_AxanimCshomfwbg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AxanimCshomfwbg: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_15_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_EBkkfjcEDyEptfo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_EBkkfjcEDyEptfo .L_16_blocks_overflow_EBkkfjcEDyEptfo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_EBkkfjcEDyEptfo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_henbgxejEhFgymC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_henbgxejEhFgymC .L_small_initial_partial_block_henbgxejEhFgymC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_henbgxejEhFgymC: orq %r8,%r8 je .L_after_reduction_henbgxejEhFgymC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_henbgxejEhFgymC: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_16_BdcphecxdpdFEsb: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_BlcvjlyDGzsAttk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_BlcvjlyDGzsAttk .L_16_blocks_overflow_BlcvjlyDGzsAttk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_BlcvjlyDGzsAttk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 
192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_aGAffhBljtiFsea: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aGAffhBljtiFsea: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aGAffhBljtiFsea: jmp .L_last_blocks_done_BdcphecxdpdFEsb .L_last_num_blocks_is_0_BdcphecxdpdFEsb: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_BdcphecxdpdFEsb: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_pzwgkGgbplFqzaB .L_encrypt_32_blocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae .L_16_blocks_overflow_zuczDhwqwDAmzjf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_zuczDhwqwDAmzjf .L_16_blocks_overflow_zuczDhwqwDAmzjf: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_zuczDhwqwDAmzjf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_CeGBtrGsogoqpyb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_CeGBtrGsogoqpyb .L_16_blocks_overflow_CeGBtrGsogoqpyb: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_CeGBtrGsogoqpyb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 
1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 
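/* These vpternlogq instructions (imm8 0x96 = three-way XOR) fold the per-block carry-less
   products into the GHASH accumulators; the shift/extract/POLY2 sequence that follows reduces
   them modulo the GCM polynomial, updating the hash state in %xmm14. */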
vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CfrpfvcjvvrcbGa cmpl $8,%r10d je .L_last_num_blocks_is_8_CfrpfvcjvvrcbGa jb .L_last_num_blocks_is_7_1_CfrpfvcjvvrcbGa cmpl $12,%r10d je .L_last_num_blocks_is_12_CfrpfvcjvvrcbGa jb .L_last_num_blocks_is_11_9_CfrpfvcjvvrcbGa cmpl $15,%r10d je .L_last_num_blocks_is_15_CfrpfvcjvvrcbGa ja .L_last_num_blocks_is_16_CfrpfvcjvvrcbGa cmpl $14,%r10d je .L_last_num_blocks_is_14_CfrpfvcjvvrcbGa jmp .L_last_num_blocks_is_13_CfrpfvcjvvrcbGa .L_last_num_blocks_is_11_9_CfrpfvcjvvrcbGa: cmpl $10,%r10d je .L_last_num_blocks_is_10_CfrpfvcjvvrcbGa ja .L_last_num_blocks_is_11_CfrpfvcjvvrcbGa jmp .L_last_num_blocks_is_9_CfrpfvcjvvrcbGa .L_last_num_blocks_is_7_1_CfrpfvcjvvrcbGa: cmpl $4,%r10d je .L_last_num_blocks_is_4_CfrpfvcjvvrcbGa jb .L_last_num_blocks_is_3_1_CfrpfvcjvvrcbGa cmpl $6,%r10d ja .L_last_num_blocks_is_7_CfrpfvcjvvrcbGa je .L_last_num_blocks_is_6_CfrpfvcjvvrcbGa jmp .L_last_num_blocks_is_5_CfrpfvcjvvrcbGa .L_last_num_blocks_is_3_1_CfrpfvcjvvrcbGa: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CfrpfvcjvvrcbGa je .L_last_num_blocks_is_2_CfrpfvcjvvrcbGa .L_last_num_blocks_is_1_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_dbajrbEcjsFpceD vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_dbajrbEcjsFpceD .L_16_blocks_overflow_dbajrbEcjsFpceD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_dbajrbEcjsFpceD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_qFrfFusofbDaigi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qFrfFusofbDaigi .L_small_initial_partial_block_qFrfFusofbDaigi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_qFrfFusofbDaigi .L_small_initial_compute_done_qFrfFusofbDaigi: .L_after_reduction_qFrfFusofbDaigi: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_2_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_kgpAeeaoAnozgEF vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_kgpAeeaoAnozgEF .L_16_blocks_overflow_kgpAeeaoAnozgEF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_kgpAeeaoAnozgEF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 
960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zaugFxnkqnldtoD subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zaugFxnkqnldtoD .L_small_initial_partial_block_zaugFxnkqnldtoD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zaugFxnkqnldtoD: orq %r8,%r8 je .L_after_reduction_zaugFxnkqnldtoD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zaugFxnkqnldtoD: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_3_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_kblsDeoCDCisntD vpaddd %zmm28,%zmm2,%zmm0 jmp 
.L_16_blocks_ok_kblsDeoCDCisntD .L_16_blocks_overflow_kblsDeoCDCisntD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_kblsDeoCDCisntD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Ajrbbfyxhsbqszm subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Ajrbbfyxhsbqszm .L_small_initial_partial_block_Ajrbbfyxhsbqszm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Ajrbbfyxhsbqszm: orq %r8,%r8 je .L_after_reduction_Ajrbbfyxhsbqszm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Ajrbbfyxhsbqszm: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_4_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_eGcBplCnDqdtGiy vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_eGcBplCnDqdtGiy .L_16_blocks_overflow_eGcBplCnDqdtGiy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_eGcBplCnDqdtGiy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xeEmmeAmgryyzGr subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xeEmmeAmgryyzGr .L_small_initial_partial_block_xeEmmeAmgryyzGr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xeEmmeAmgryyzGr: orq %r8,%r8 je .L_after_reduction_xeEmmeAmgryyzGr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xeEmmeAmgryyzGr: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_5_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_bgsqDFmekFAimag vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_bgsqDFmekFAimag .L_16_blocks_overflow_bgsqDFmekFAimag: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_bgsqDFmekFAimag: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 
98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iwszuhryhslDkgD subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iwszuhryhslDkgD .L_small_initial_partial_block_iwszuhryhslDkgD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iwszuhryhslDkgD: orq %r8,%r8 je .L_after_reduction_iwszuhryhslDkgD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iwszuhryhslDkgD: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_6_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_oaGuttEwoetbnjp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_oaGuttEwoetbnjp 
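/* Counter-overflow path: when the low byte of the big-endian counter would wrap, the counter
   blocks are byte-reflected with vpshufb, incremented as 32-bit lanes, and reflected back;
   otherwise the increments are applied to the counters in place. */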
.L_16_blocks_overflow_oaGuttEwoetbnjp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_oaGuttEwoetbnjp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pulckbvkcxsatqu subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pulckbvkcxsatqu .L_small_initial_partial_block_pulckbvkcxsatqu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pulckbvkcxsatqu: orq %r8,%r8 je .L_after_reduction_pulckbvkcxsatqu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pulckbvkcxsatqu: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_7_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_FvhiAqmdFpdFmlp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_FvhiAqmdFpdFmlp .L_16_blocks_overflow_FvhiAqmdFpdFmlp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_FvhiAqmdFpdFmlp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 
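/* The .byte runs throughout this file are raw EVEX (0x62-prefixed) encodings of vpclmulqdq,
   vaesenc and vaesenclast, emitted as literal bytes rather than mnemonics. */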
.byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sqobqxAEFkeiGsu subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sqobqxAEFkeiGsu .L_small_initial_partial_block_sqobqxAEFkeiGsu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sqobqxAEFkeiGsu: orq %r8,%r8 je .L_after_reduction_sqobqxAEFkeiGsu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sqobqxAEFkeiGsu: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_8_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d 
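/* %r15b tracks the low byte of the big-endian counter; with 8 more blocks to count, a value of
   248 or above would carry out of that byte, so the byte-swapping increment path is taken. */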
jae .L_16_blocks_overflow_hwGtCmqmcvackpz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_hwGtCmqmcvackpz .L_16_blocks_overflow_hwGtCmqmcvackpz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_hwGtCmqmcvackpz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_itiiyBtdfcskbai subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_itiiyBtdfcskbai .L_small_initial_partial_block_itiiyBtdfcskbai: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_itiiyBtdfcskbai: orq %r8,%r8 je .L_after_reduction_itiiyBtdfcskbai vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_itiiyBtdfcskbai: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_9_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_DDnhmxjezrilein vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_DDnhmxjezrilein .L_16_blocks_overflow_DDnhmxjezrilein: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_DDnhmxjezrilein: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq 
$0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bBBEnlialjlpfsp subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bBBEnlialjlpfsp .L_small_initial_partial_block_bBBEnlialjlpfsp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 
vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bBBEnlialjlpfsp: orq %r8,%r8 je .L_after_reduction_bBBEnlialjlpfsp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bBBEnlialjlpfsp: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_10_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_zCijhbGCeraapou vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_zCijhbGCeraapou .L_16_blocks_overflow_zCijhbGCeraapou: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_zCijhbGCeraapou: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DzFChhwqqhyhjhC subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DzFChhwqqhyhjhC .L_small_initial_partial_block_DzFChhwqqhyhjhC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DzFChhwqqhyhjhC: orq %r8,%r8 je .L_after_reduction_DzFChhwqqhyhjhC vpxorq 
%xmm7,%xmm14,%xmm14 .L_after_reduction_DzFChhwqqhyhjhC: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_11_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_aafwvnrniBpBhGh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_aafwvnrniBpBhGh .L_16_blocks_overflow_aafwvnrniBpBhGh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_aafwvnrniBpBhGh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_ntDAaiasAzzqzla subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ntDAaiasAzzqzla .L_small_initial_partial_block_ntDAaiasAzzqzla: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ntDAaiasAzzqzla: orq %r8,%r8 je .L_after_reduction_ntDAaiasAzzqzla vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ntDAaiasAzzqzla: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_12_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_szlfmGmeuofoAra vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_szlfmGmeuofoAra .L_16_blocks_overflow_szlfmGmeuofoAra: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 
.L_16_blocks_ok_szlfmGmeuofoAra: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FgCEuitmambDkxu subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq 
%zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FgCEuitmambDkxu .L_small_initial_partial_block_FgCEuitmambDkxu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FgCEuitmambDkxu: orq %r8,%r8 je .L_after_reduction_FgCEuitmambDkxu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FgCEuitmambDkxu: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_13_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_knBrwwsfezoBuDz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_knBrwwsfezoBuDz .L_16_blocks_overflow_knBrwwsfezoBuDz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_knBrwwsfezoBuDz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_szDFenAfBoEDgjz subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 
vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_szDFenAfBoEDgjz .L_small_initial_partial_block_szDFenAfBoEDgjz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_szDFenAfBoEDgjz: orq %r8,%r8 je .L_after_reduction_szDFenAfBoEDgjz vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_szDFenAfBoEDgjz: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_14_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_xfkAqxxGjDnhBjB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_xfkAqxxGjDnhBjB .L_16_blocks_overflow_xfkAqxxGjDnhBjB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_xfkAqxxGjDnhBjB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xpnwxzswluGFliu subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xpnwxzswluGFliu .L_small_initial_partial_block_xpnwxzswluGFliu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xpnwxzswluGFliu: orq %r8,%r8 je .L_after_reduction_xpnwxzswluGFliu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xpnwxzswluGFliu: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_15_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_myvDpkrqCoAukhb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_myvDpkrqCoAukhb .L_16_blocks_overflow_myvDpkrqCoAukhb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_myvDpkrqCoAukhb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jjDbyaqFmGmaiEB subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 
vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jjDbyaqFmGmaiEB .L_small_initial_partial_block_jjDbyaqFmGmaiEB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jjDbyaqFmGmaiEB: orq %r8,%r8 je .L_after_reduction_jjDbyaqFmGmaiEB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jjDbyaqFmGmaiEB: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_16_CfrpfvcjvvrcbGa: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_zEAEoetgkvqojFa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_zEAEoetgkvqojFa .L_16_blocks_overflow_zEAEoetgkvqojFa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_zEAEoetgkvqojFa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_erAoEayjDqpuhEu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 
.byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_erAoEayjDqpuhEu: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_erAoEayjDqpuhEu: jmp .L_last_blocks_done_CfrpfvcjvvrcbGa .L_last_num_blocks_is_0_CfrpfvcjvvrcbGa: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CfrpfvcjvvrcbGa: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_pzwgkGgbplFqzaB .L_encrypt_16_blocks_pzwgkGgbplFqzaB: cmpb $240,%r15b jae .L_16_blocks_overflow_rkcxrDqAhslhkiA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_rkcxrDqAhslhkiA .L_16_blocks_overflow_rkcxrDqAhslhkiA: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_rkcxrDqAhslhkiA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 
vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 
98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_aibBfiDGEtrGszv cmpl $8,%r10d je .L_last_num_blocks_is_8_aibBfiDGEtrGszv jb .L_last_num_blocks_is_7_1_aibBfiDGEtrGszv cmpl $12,%r10d je .L_last_num_blocks_is_12_aibBfiDGEtrGszv jb .L_last_num_blocks_is_11_9_aibBfiDGEtrGszv cmpl $15,%r10d je .L_last_num_blocks_is_15_aibBfiDGEtrGszv ja .L_last_num_blocks_is_16_aibBfiDGEtrGszv cmpl $14,%r10d je .L_last_num_blocks_is_14_aibBfiDGEtrGszv jmp .L_last_num_blocks_is_13_aibBfiDGEtrGszv .L_last_num_blocks_is_11_9_aibBfiDGEtrGszv: cmpl $10,%r10d je .L_last_num_blocks_is_10_aibBfiDGEtrGszv ja .L_last_num_blocks_is_11_aibBfiDGEtrGszv jmp .L_last_num_blocks_is_9_aibBfiDGEtrGszv .L_last_num_blocks_is_7_1_aibBfiDGEtrGszv: cmpl $4,%r10d je .L_last_num_blocks_is_4_aibBfiDGEtrGszv jb .L_last_num_blocks_is_3_1_aibBfiDGEtrGszv cmpl $6,%r10d ja .L_last_num_blocks_is_7_aibBfiDGEtrGszv je .L_last_num_blocks_is_6_aibBfiDGEtrGszv jmp .L_last_num_blocks_is_5_aibBfiDGEtrGszv .L_last_num_blocks_is_3_1_aibBfiDGEtrGszv: cmpl $2,%r10d ja .L_last_num_blocks_is_3_aibBfiDGEtrGszv je .L_last_num_blocks_is_2_aibBfiDGEtrGszv .L_last_num_blocks_is_1_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_CfAjeyGwbnghnsF vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_CfAjeyGwbnghnsF .L_16_blocks_overflow_CfAjeyGwbnghnsF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_CfAjeyGwbnghnsF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 
vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_EpCxqyApoFBApzn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EpCxqyApoFBApzn .L_small_initial_partial_block_EpCxqyApoFBApzn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_EpCxqyApoFBApzn .L_small_initial_compute_done_EpCxqyApoFBApzn: .L_after_reduction_EpCxqyApoFBApzn: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_2_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_sbkoxvmnmihnaig vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_sbkoxvmnmihnaig .L_16_blocks_overflow_sbkoxvmnmihnaig: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_sbkoxvmnmihnaig: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 
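/*
 * Throughout these tail paths the AES rounds on the counter blocks
 * (vbroadcastf64x2 round-key loads feeding the raw-byte-encoded AES
 * round instructions) are interleaved with the carry-less multiplies
 * that hash the previously produced ciphertext. vpternlogq with
 * immediate 0x96 is a three-way XOR (dst = dst ^ src1 ^ src2) and is
 * used to merge the partial GHASH products in a single instruction.
 */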
vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rlBeEnisjmybagx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rlBeEnisjmybagx .L_small_initial_partial_block_rlBeEnisjmybagx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rlBeEnisjmybagx: orq %r8,%r8 je .L_after_reduction_rlBeEnisjmybagx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rlBeEnisjmybagx: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_3_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_zopCCjajxtsjEdG vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_zopCCjajxtsjEdG 
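/*
 * Each .L_16_blocks_overflow_* / .L_16_blocks_ok_* pair implements the
 * two counter-increment paths. When the low counter byte (tracked in
 * %r15d and compared above) cannot wrap, the pre-shuffled increment in
 * zmm28/ymm28/xmm28 is added directly to the byte-reflected counter
 * block in zmm2. Otherwise the block is byte-swapped with the shuffle
 * mask in zmm29, incremented with the ddq_add_1234/ddq_add_4444
 * constants as little-endian integers, and swapped back.
 */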
.L_16_blocks_overflow_zopCCjajxtsjEdG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_zopCCjajxtsjEdG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hzwxdhlzEAlznGG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hzwxdhlzEAlznGG .L_small_initial_partial_block_hzwxdhlzEAlznGG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hzwxdhlzEAlznGG: orq %r8,%r8 je .L_after_reduction_hzwxdhlzEAlznGG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hzwxdhlzEAlznGG: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_4_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_utgfjaowycovqbp vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_utgfjaowycovqbp .L_16_blocks_overflow_utgfjaowycovqbp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_utgfjaowycovqbp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq 
%ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AslmndcqqeqAFer subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AslmndcqqeqAFer .L_small_initial_partial_block_AslmndcqqeqAFer: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AslmndcqqeqAFer: orq %r8,%r8 je .L_after_reduction_AslmndcqqeqAFer vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AslmndcqqeqAFer: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_5_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_wugoGjfryfqCjFa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_wugoGjfryfqCjFa .L_16_blocks_overflow_wugoGjfryfqCjFa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_wugoGjfryfqCjFa: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 
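/*
 * Below, the final (possibly partial) 64-byte chunk of input is read
 * and the corresponding output written with zero-masked vmovdqu8 under
 * %k1; the mask is looked up from byte64_len_to_mask_table using the
 * remaining byte count in %r8/%r11, so only the valid tail bytes are
 * ever loaded or stored.
 */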
vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CFkxkbxvkninECi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CFkxkbxvkninECi .L_small_initial_partial_block_CFkxkbxvkninECi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CFkxkbxvkninECi: orq %r8,%r8 je .L_after_reduction_CFkxkbxvkninECi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CFkxkbxvkninECi: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_6_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_bpCexfjrkbCbhBc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_bpCexfjrkbCbhBc .L_16_blocks_overflow_bpCexfjrkbCbhBc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_bpCexfjrkbCbhBc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 
144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ojmsEGarpmywurj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ojmsEGarpmywurj .L_small_initial_partial_block_ojmsEGarpmywurj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ojmsEGarpmywurj: orq %r8,%r8 je .L_after_reduction_ojmsEGarpmywurj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ojmsEGarpmywurj: jmp 
.L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_7_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_ifByzBizpdBxFnD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_ifByzBizpdBxFnD .L_16_blocks_overflow_ifByzBizpdBxFnD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_ifByzBizpdBxFnD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_yEEshkytCfbpoyC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yEEshkytCfbpoyC .L_small_initial_partial_block_yEEshkytCfbpoyC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yEEshkytCfbpoyC: orq %r8,%r8 je .L_after_reduction_yEEshkytCfbpoyC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yEEshkytCfbpoyC: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_8_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_cjwhqEvpCfjCcEa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_cjwhqEvpCfjCcEa .L_16_blocks_overflow_cjwhqEvpCfjCcEa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_cjwhqEvpCfjCcEa: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 
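/*
 * %rdi points at the expanded AES key schedule: vbroadcastf64x2 N(%rdi)
 * replicates the round key at offset N into all four 128-bit lanes of a
 * zmm register, so each (hand-encoded) AES round instruction advances
 * up to four counter blocks per register at once.
 */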
vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EqvthrGbiBgAmsm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_EqvthrGbiBgAmsm .L_small_initial_partial_block_EqvthrGbiBgAmsm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EqvthrGbiBgAmsm: orq %r8,%r8 je .L_after_reduction_EqvthrGbiBgAmsm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EqvthrGbiBgAmsm: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_9_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_xiomBjDmsdhvtig vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_xiomBjDmsdhvtig .L_16_blocks_overflow_xiomBjDmsdhvtig: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_xiomBjDmsdhvtig: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 
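/*
 * The .byte 98,... sequences are EVEX-encoded instructions (98 = 0x62,
 * the EVEX prefix) emitted as literal bytes -- most likely the VAES
 * (vaesenc/vaesenclast) and VPCLMULQDQ forms operating on xmm/ymm/zmm
 * registers, written out this way so that assemblers without VAES and
 * VPCLMULQDQ support can still build the file.
 */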
.byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mbfjpvagktvcgbq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mbfjpvagktvcgbq .L_small_initial_partial_block_mbfjpvagktvcgbq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 
98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mbfjpvagktvcgbq: orq %r8,%r8 je .L_after_reduction_mbfjpvagktvcgbq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mbfjpvagktvcgbq: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_10_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_cEyikykuFcExlBe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_cEyikykuFcExlBe .L_16_blocks_overflow_cEyikykuFcExlBe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_cEyikykuFcExlBe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 
vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zkabbaDExfgmaqw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zkabbaDExfgmaqw .L_small_initial_partial_block_zkabbaDExfgmaqw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 
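/*
 * After encryption, each N-block tail hashes the byte-reflected
 * ciphertext against precomputed hash-key powers: %r10 is set to
 * 80(%rsi) and the vmovdqu64 loads step backwards through the table
 * (240, 224, 208, ...), so an N-block remainder appears to pick up
 * H^N..H^1 before the final reduction into xmm14. %r8 holds the
 * remaining byte count; in the partial-block branch it is recorded at
 * (%rdx) and what appears to be the partial output block (xmm11) is
 * saved at 16(%rsi) for later completion.
 */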
vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zkabbaDExfgmaqw: orq %r8,%r8 je .L_after_reduction_zkabbaDExfgmaqw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zkabbaDExfgmaqw: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_11_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_gsBoGfzrmwqlomo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_gsBoGfzrmwqlomo .L_16_blocks_overflow_gsBoGfzrmwqlomo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_gsBoGfzrmwqlomo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq 
$8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jaixjmwppjCmscj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jaixjmwppjCmscj .L_small_initial_partial_block_jaixjmwppjCmscj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jaixjmwppjCmscj: orq %r8,%r8 je .L_after_reduction_jaixjmwppjCmscj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jaixjmwppjCmscj: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_12_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_CAvgqgqjrtonFws vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_CAvgqgqjrtonFws .L_16_blocks_overflow_CAvgqgqjrtonFws: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_CAvgqgqjrtonFws: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
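/* Editorial annotation (not generator output): the ".byte 98,..." sequences
   throughout this section appear to be manually EVEX-encoded instructions
   (0x62 prefix), i.e. VAES vaesenc/vaesenclast and VPCLMULQDQ operating on
   zmm registers, emitted as raw bytes for assemblers that lack these
   mnemonics. */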
.byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hvmFFygfifAjAnG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hvmFFygfifAjAnG .L_small_initial_partial_block_hvmFFygfifAjAnG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hvmFFygfifAjAnG: orq %r8,%r8 je .L_after_reduction_hvmFFygfifAjAnG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hvmFFygfifAjAnG: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_13_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_zqBffksAbxFoiFr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_zqBffksAbxFoiFr .L_16_blocks_overflow_zqBffksAbxFoiFr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_zqBffksAbxFoiFr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 
144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kmvbbtEzBEoeAuq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kmvbbtEzBEoeAuq .L_small_initial_partial_block_kmvbbtEzBEoeAuq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 
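/* Editorial annotation (not generator output): vpternlogq with imm8 0x96
   computes a three-way XOR (dst = dst ^ src1 ^ src2); it is used here to
   fold the carry-less-multiplication partial products into the GHASH
   accumulators in a single instruction. */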
vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kmvbbtEzBEoeAuq: orq %r8,%r8 je .L_after_reduction_kmvbbtEzBEoeAuq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kmvbbtEzBEoeAuq: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_14_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_mBiifnhuGFDpfDy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_mBiifnhuGFDpfDy .L_16_blocks_overflow_mBiifnhuGFDpfDy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_mBiifnhuGFDpfDy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 
.byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_imGnxqypsDyhyek subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_imGnxqypsDyhyek .L_small_initial_partial_block_imGnxqypsDyhyek: movl %r8d,(%rdx) 
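/* Editorial annotation (not generator output): in the
   .L_small_initial_partial_block_* paths the leftover byte count appears to
   be stored through %rdx and the last (partial) data block saved at
   16(%rsi), deferring its GHASH contribution; the matching masked
   vmovdqu8 {%k1}{z} load/store above handles the tail bytes themselves. */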
vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_imGnxqypsDyhyek: orq %r8,%r8 je .L_after_reduction_imGnxqypsDyhyek vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_imGnxqypsDyhyek: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_15_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_zDGlqyFvuaglkeB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_zDGlqyFvuaglkeB .L_16_blocks_overflow_zDGlqyFvuaglkeB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_zDGlqyFvuaglkeB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BuGprjrzjxrmorl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq 
%zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BuGprjrzjxrmorl .L_small_initial_partial_block_BuGprjrzjxrmorl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BuGprjrzjxrmorl: orq %r8,%r8 je .L_after_reduction_BuGprjrzjxrmorl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BuGprjrzjxrmorl: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_16_aibBfiDGEtrGszv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_uwtqqfwgewBdjhg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_uwtqqfwgewBdjhg .L_16_blocks_overflow_uwtqqfwgewBdjhg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_uwtqqfwgewBdjhg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_ifytbdtuElzEqkG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 
98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ifytbdtuElzEqkG: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ifytbdtuElzEqkG: jmp .L_last_blocks_done_aibBfiDGEtrGszv .L_last_num_blocks_is_0_aibBfiDGEtrGszv: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_aibBfiDGEtrGszv: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_pzwgkGgbplFqzaB .L_message_below_32_blocks_pzwgkGgbplFqzaB: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_mgjxphyGhnqeEta vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 
.byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_mgjxphyGhnqeEta: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_FjCtcrwcdAhCtrr cmpl $8,%r10d je .L_last_num_blocks_is_8_FjCtcrwcdAhCtrr jb .L_last_num_blocks_is_7_1_FjCtcrwcdAhCtrr cmpl $12,%r10d je .L_last_num_blocks_is_12_FjCtcrwcdAhCtrr jb .L_last_num_blocks_is_11_9_FjCtcrwcdAhCtrr cmpl $15,%r10d je .L_last_num_blocks_is_15_FjCtcrwcdAhCtrr ja .L_last_num_blocks_is_16_FjCtcrwcdAhCtrr cmpl $14,%r10d je .L_last_num_blocks_is_14_FjCtcrwcdAhCtrr jmp .L_last_num_blocks_is_13_FjCtcrwcdAhCtrr .L_last_num_blocks_is_11_9_FjCtcrwcdAhCtrr: cmpl $10,%r10d je .L_last_num_blocks_is_10_FjCtcrwcdAhCtrr ja .L_last_num_blocks_is_11_FjCtcrwcdAhCtrr jmp .L_last_num_blocks_is_9_FjCtcrwcdAhCtrr .L_last_num_blocks_is_7_1_FjCtcrwcdAhCtrr: cmpl $4,%r10d je .L_last_num_blocks_is_4_FjCtcrwcdAhCtrr jb .L_last_num_blocks_is_3_1_FjCtcrwcdAhCtrr cmpl $6,%r10d ja .L_last_num_blocks_is_7_FjCtcrwcdAhCtrr je .L_last_num_blocks_is_6_FjCtcrwcdAhCtrr jmp .L_last_num_blocks_is_5_FjCtcrwcdAhCtrr .L_last_num_blocks_is_3_1_FjCtcrwcdAhCtrr: cmpl $2,%r10d ja .L_last_num_blocks_is_3_FjCtcrwcdAhCtrr je .L_last_num_blocks_is_2_FjCtcrwcdAhCtrr .L_last_num_blocks_is_1_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_zCjdttbyboeGxFb vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_zCjdttbyboeGxFb .L_16_blocks_overflow_zCjdttbyboeGxFb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb 
%xmm29,%xmm0,%xmm0 .L_16_blocks_ok_zCjdttbyboeGxFb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_ojiwxsAElGDCCBo subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ojiwxsAElGDCCBo .L_small_initial_partial_block_ojiwxsAElGDCCBo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_ojiwxsAElGDCCBo .L_small_initial_compute_done_ojiwxsAElGDCCBo: .L_after_reduction_ojiwxsAElGDCCBo: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_2_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_fhFvhqpaozkgyzE vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_fhFvhqpaozkgyzE .L_16_blocks_overflow_fhFvhqpaozkgyzE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_fhFvhqpaozkgyzE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_poknuzddusxymkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_poknuzddusxymkw .L_small_initial_partial_block_poknuzddusxymkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_poknuzddusxymkw: orq %r8,%r8 je .L_after_reduction_poknuzddusxymkw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_poknuzddusxymkw: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_3_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_hjBmpccGhruhCnv vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_hjBmpccGhruhCnv .L_16_blocks_overflow_hjBmpccGhruhCnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_hjBmpccGhruhCnv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_yshcwAFsbqgougy subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yshcwAFsbqgougy .L_small_initial_partial_block_yshcwAFsbqgougy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yshcwAFsbqgougy: orq %r8,%r8 je .L_after_reduction_yshcwAFsbqgougy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yshcwAFsbqgougy: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_4_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_bBrsEuBDcsAcscn vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_bBrsEuBDcsAcscn .L_16_blocks_overflow_bBrsEuBDcsAcscn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_bBrsEuBDcsAcscn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 
vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bDghuGEnDqEshwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bDghuGEnDqEshwp .L_small_initial_partial_block_bDghuGEnDqEshwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bDghuGEnDqEshwp: orq %r8,%r8 je .L_after_reduction_bDghuGEnDqEshwp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bDghuGEnDqEshwp: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_5_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_nygdGeFptfwzvpw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_nygdGeFptfwzvpw .L_16_blocks_overflow_nygdGeFptfwzvpw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_nygdGeFptfwzvpw: 
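/* Editorial annotation (not generator output): this is one of the
   .L_last_num_blocks_is_N_* tail handlers selected by the compare/branch
   ladder above.  Each handler appears to build the final N counter blocks
   (the _overflow_ variant rebuilds them via vpshufb byte swaps when the low
   counter byte would wrap), run the AES rounds on zmm registers while
   folding earlier ciphertext into the GHASH accumulators with VPCLMULQDQ,
   mask the trailing partial 64-byte chunk with %k1 loaded from
   byte64_len_to_mask_table, and finish with the POLY2-based reduction
   modulo the GHASH polynomial. */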
vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dFGmpkoEnwhmCiq subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dFGmpkoEnwhmCiq 
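/* Note (annotation, hedged): .L_small_initial_partial_block_* is taken when the final block of
   the tail is not a full 16 bytes; the residual byte count appears to be recorded through (%rdx)
   and the last output block saved at 16(%rsi), deferring completion of that partial block. */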
.L_small_initial_partial_block_dFGmpkoEnwhmCiq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dFGmpkoEnwhmCiq: orq %r8,%r8 je .L_after_reduction_dFGmpkoEnwhmCiq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dFGmpkoEnwhmCiq: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_6_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_adtbeheumiAkmlw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_adtbeheumiAkmlw .L_16_blocks_overflow_adtbeheumiAkmlw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_adtbeheumiAkmlw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nDqCwyzuFDuivbj subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nDqCwyzuFDuivbj .L_small_initial_partial_block_nDqCwyzuFDuivbj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nDqCwyzuFDuivbj: orq %r8,%r8 je .L_after_reduction_nDqCwyzuFDuivbj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nDqCwyzuFDuivbj: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_7_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_aDdoAskralEtovy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_aDdoAskralEtovy .L_16_blocks_overflow_aDdoAskralEtovy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_aDdoAskralEtovy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 
0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GmgCerxizidGGeG subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GmgCerxizidGGeG .L_small_initial_partial_block_GmgCerxizidGGeG: 
movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GmgCerxizidGGeG: orq %r8,%r8 je .L_after_reduction_GmgCerxizidGGeG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GmgCerxizidGGeG: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_8_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_hjBdmnrbjjzAbzC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_hjBdmnrbjjzAbzC .L_16_blocks_overflow_hjBdmnrbjjzAbzC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_hjBdmnrbjjzAbzC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq 
%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_oukGaFAnaceFaaB subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_oukGaFAnaceFaaB .L_small_initial_partial_block_oukGaFAnaceFaaB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_oukGaFAnaceFaaB: orq %r8,%r8 je .L_after_reduction_oukGaFAnaceFaaB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_oukGaFAnaceFaaB: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_9_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_szBmuqzxwjxBawF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_szBmuqzxwjxBawF .L_16_blocks_overflow_szBmuqzxwjxBawF: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_szBmuqzxwjxBawF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_paBklhesgEuGBAF subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 
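/* Note (annotation, hedged): the vmovdqu64 loads from (%r10) (set to 80(%rsi) earlier) appear to
   index a table of precomputed powers of the GHASH key H; the starting offset depends on how many
   blocks remain to be hashed. */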
.byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_paBklhesgEuGBAF .L_small_initial_partial_block_paBklhesgEuGBAF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_paBklhesgEuGBAF: orq %r8,%r8 je .L_after_reduction_paBklhesgEuGBAF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_paBklhesgEuGBAF: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_10_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_xhlcvtlyGczsicp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_xhlcvtlyGczsicp .L_16_blocks_overflow_xhlcvtlyGczsicp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_xhlcvtlyGczsicp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nytpeiDsozzjuGs subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nytpeiDsozzjuGs 
.L_small_initial_partial_block_nytpeiDsozzjuGs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nytpeiDsozzjuGs: orq %r8,%r8 je .L_after_reduction_nytpeiDsozzjuGs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nytpeiDsozzjuGs: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_11_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_CkhBiupnDlzBoGx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_CkhBiupnDlzBoGx .L_16_blocks_overflow_CkhBiupnDlzBoGx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_CkhBiupnDlzBoGx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 
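/* Note (annotation): the {%k1}{z} loads and stores in this tail path use the mask fetched from
   byte64_len_to_mask_table for the residual length, so only the bytes that are actually present
   in the last partial 64-byte group are read and written. */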
vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BsdepnxnqoCzhkf subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BsdepnxnqoCzhkf .L_small_initial_partial_block_BsdepnxnqoCzhkf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq 
$0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BsdepnxnqoCzhkf: orq %r8,%r8 je .L_after_reduction_BsdepnxnqoCzhkf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BsdepnxnqoCzhkf: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_12_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_svvcxnisrDiilsD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_svvcxnisrDiilsD .L_16_blocks_overflow_svvcxnisrDiilsD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_svvcxnisrDiilsD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 
vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vFAcldEivdmCjng subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vFAcldEivdmCjng .L_small_initial_partial_block_vFAcldEivdmCjng: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vFAcldEivdmCjng: orq 
%r8,%r8 je .L_after_reduction_vFAcldEivdmCjng vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vFAcldEivdmCjng: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_13_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_oDDmorFzihnoffg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_oDDmorFzihnoffg .L_16_blocks_overflow_oDDmorFzihnoffg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_oDDmorFzihnoffg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yccbCzjnDwADEak subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yccbCzjnDwADEak .L_small_initial_partial_block_yccbCzjnDwADEak: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yccbCzjnDwADEak: orq %r8,%r8 je .L_after_reduction_yccbCzjnDwADEak vpxorq %xmm7,%xmm14,%xmm14 
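/* Note (annotation, hedged): at this point xmm14 holds the GHASH accumulator after reduction
   modulo the field polynomial (the POLY2 constant with the two vpclmulqdq steps above); when %r8
   is non-zero, the byte-reflected final block in xmm7 has just been folded in. */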
.L_after_reduction_yccbCzjnDwADEak: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_14_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_liipuseeafvnkfi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_liipuseeafvnkfi .L_16_blocks_overflow_liipuseeafvnkfi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_liipuseeafvnkfi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq 
%zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BentjlpjfFDzvxb subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BentjlpjfFDzvxb .L_small_initial_partial_block_BentjlpjfFDzvxb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
.L_small_initial_compute_done_BentjlpjfFDzvxb: orq %r8,%r8 je .L_after_reduction_BentjlpjfFDzvxb vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BentjlpjfFDzvxb: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_15_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_cuygxmuthGeaeby vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_cuygxmuthGeaeby .L_16_blocks_overflow_cuygxmuthGeaeby: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_cuygxmuthGeaeby: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 
.byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qokhdigphzzzcxp subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qokhdigphzzzcxp .L_small_initial_partial_block_qokhdigphzzzcxp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qokhdigphzzzcxp: orq %r8,%r8 je .L_after_reduction_qokhdigphzzzcxp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qokhdigphzzzcxp: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_16_FjCtcrwcdAhCtrr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_AgkAgztElEpGqer vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_AgkAgztElEpGqer .L_16_blocks_overflow_AgkAgztElEpGqer: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_AgkAgztElEpGqer: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_eruDeitqttsEEhG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_eruDeitqttsEEhG: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_eruDeitqttsEEhG: jmp .L_last_blocks_done_FjCtcrwcdAhCtrr .L_last_num_blocks_is_0_FjCtcrwcdAhCtrr: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 
vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_FjCtcrwcdAhCtrr: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_pzwgkGgbplFqzaB .L_message_below_equal_16_blocks_pzwgkGgbplFqzaB: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_Arjlgemsqpaxhfj jl .L_small_initial_num_blocks_is_7_1_Arjlgemsqpaxhfj cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_Arjlgemsqpaxhfj jl .L_small_initial_num_blocks_is_11_9_Arjlgemsqpaxhfj cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_Arjlgemsqpaxhfj cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_Arjlgemsqpaxhfj cmpq $14,%r12 je .L_small_initial_num_blocks_is_14_Arjlgemsqpaxhfj jmp .L_small_initial_num_blocks_is_13_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_11_9_Arjlgemsqpaxhfj: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_Arjlgemsqpaxhfj cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_Arjlgemsqpaxhfj jmp .L_small_initial_num_blocks_is_9_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_7_1_Arjlgemsqpaxhfj: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_Arjlgemsqpaxhfj jl .L_small_initial_num_blocks_is_3_1_Arjlgemsqpaxhfj cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_Arjlgemsqpaxhfj cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_Arjlgemsqpaxhfj jmp .L_small_initial_num_blocks_is_5_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_3_1_Arjlgemsqpaxhfj: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_Arjlgemsqpaxhfj cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_1_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_qFFkbngiCspnnzb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq 
%xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qFFkbngiCspnnzb .L_small_initial_partial_block_qFFkbngiCspnnzb: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_qFFkbngiCspnnzb .L_small_initial_compute_done_qFFkbngiCspnnzb: .L_after_reduction_qFFkbngiCspnnzb: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_2_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vcznqnCBEluErfz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vcznqnCBEluErfz .L_small_initial_partial_block_vcznqnCBEluErfz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vcznqnCBEluErfz: orq %r8,%r8 je .L_after_reduction_vcznqnCBEluErfz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_vcznqnCBEluErfz: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_3_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lxlwCnecElggboh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lxlwCnecElggboh .L_small_initial_partial_block_lxlwCnecElggboh: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lxlwCnecElggboh: orq %r8,%r8 je .L_after_reduction_lxlwCnecElggboh vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_lxlwCnecElggboh: jmp 
.L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_4_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_uwbAugwxtaEtqkm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_uwbAugwxtaEtqkm .L_small_initial_partial_block_uwbAugwxtaEtqkm: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uwbAugwxtaEtqkm: orq %r8,%r8 je .L_after_reduction_uwbAugwxtaEtqkm vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_uwbAugwxtaEtqkm: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_5_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq 
(%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DlAbwtibuwDuckF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DlAbwtibuwDuckF .L_small_initial_partial_block_DlAbwtibuwDuckF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DlAbwtibuwDuckF: orq %r8,%r8 je .L_after_reduction_DlAbwtibuwDuckF vpxorq 
%xmm13,%xmm14,%xmm14 .L_after_reduction_DlAbwtibuwDuckF: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_6_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bGfevqujtGrmyqw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bGfevqujtGrmyqw .L_small_initial_partial_block_bGfevqujtGrmyqw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bGfevqujtGrmyqw: orq %r8,%r8 je .L_after_reduction_bGfevqujtGrmyqw vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_bGfevqujtGrmyqw: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_7_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BDbECEkpAEccDln subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BDbECEkpAEccDln .L_small_initial_partial_block_BDbECEkpAEccDln: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BDbECEkpAEccDln: orq %r8,%r8 je .L_after_reduction_BDbECEkpAEccDln vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_BDbECEkpAEccDln: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_8_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ngynpdbFzwtiwpp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 
vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ngynpdbFzwtiwpp .L_small_initial_partial_block_ngynpdbFzwtiwpp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ngynpdbFzwtiwpp: orq %r8,%r8 je .L_after_reduction_ngynpdbFzwtiwpp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ngynpdbFzwtiwpp: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_9_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 
.byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bxCwlFCulijpvoi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bxCwlFCulijpvoi .L_small_initial_partial_block_bxCwlFCulijpvoi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bxCwlFCulijpvoi: orq %r8,%r8 je .L_after_reduction_bxCwlFCulijpvoi vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_bxCwlFCulijpvoi: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_10_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd 
ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kAlkoicirsyCsoA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kAlkoicirsyCsoA .L_small_initial_partial_block_kAlkoicirsyCsoA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 
vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kAlkoicirsyCsoA: orq %r8,%r8 je .L_after_reduction_kAlkoicirsyCsoA vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_kAlkoicirsyCsoA: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_11_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb 
%zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rsxtmscApkaFsGk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rsxtmscApkaFsGk .L_small_initial_partial_block_rsxtmscApkaFsGk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rsxtmscApkaFsGk: orq %r8,%r8 je .L_after_reduction_rsxtmscApkaFsGk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_rsxtmscApkaFsGk: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_12_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 
vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_uytuqlquheEjDpf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_uytuqlquheEjDpf .L_small_initial_partial_block_uytuqlquheEjDpf: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq 
%zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uytuqlquheEjDpf: orq %r8,%r8 je .L_after_reduction_uytuqlquheEjDpf vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_uytuqlquheEjDpf: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_13_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 
%xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AEjkiAmqCDcyaGF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AEjkiAmqCDcyaGF .L_small_initial_partial_block_AEjkiAmqCDcyaGF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AEjkiAmqCDcyaGF: orq %r8,%r8 je .L_after_reduction_AEjkiAmqCDcyaGF vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_AEjkiAmqCDcyaGF: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_14_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd 
ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xweDbtGBgzaynjE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xweDbtGBgzaynjE .L_small_initial_partial_block_xweDbtGBgzaynjE: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xweDbtGBgzaynjE: orq %r8,%r8 je .L_after_reduction_xweDbtGBgzaynjE vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_xweDbtGBgzaynjE: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_15_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 
80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ffBpbkEzFalCAqm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ffBpbkEzFalCAqm .L_small_initial_partial_block_ffBpbkEzFalCAqm: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 
vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ffBpbkEzFalCAqm: orq %r8,%r8 je .L_after_reduction_ffBpbkEzFalCAqm vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ffBpbkEzFalCAqm: jmp .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj .L_small_initial_num_blocks_is_16_Arjlgemsqpaxhfj: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 
%zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_icDnnqvChyBsuli: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_icDnnqvChyBsuli: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_icDnnqvChyBsuli: .L_small_initial_blocks_encrypted_Arjlgemsqpaxhfj: .L_ghash_done_pzwgkGgbplFqzaB: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_pzwgkGgbplFqzaB: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_pzwgkGgbplFqzaB: jmp .Lexit_gcm_encrypt .align 32 .Laes_gcm_encrypt_192_avx512: orq %r8,%r8 je .L_enc_dec_abort_jzxBnczDBxGvzop xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_CoBypAsApBwqcnx movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_CoBypAsApBwqcnx subq %r13,%r12 .L_no_extra_mask_CoBypAsApBwqcnx: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_CoBypAsApBwqcnx .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_CoBypAsApBwqcnx .L_partial_incomplete_CoBypAsApBwqcnx: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_CoBypAsApBwqcnx: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_CoBypAsApBwqcnx: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_jzxBnczDBxGvzop cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_jzxBnczDBxGvzop vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_lelEEvckqsGkuGn vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_lelEEvckqsGkuGn .L_next_16_overflow_lelEEvckqsGkuGn: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_lelEEvckqsGkuGn: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 
%zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_EclAduckuFhozAl vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_EclAduckuFhozAl: cmpq $512,%r8 jb .L_message_below_32_blocks_jzxBnczDBxGvzop cmpb $240,%r15b jae .L_next_16_overflow_hzduBGFfzuzeflu vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_hzduBGFfzuzeflu .L_next_16_overflow_hzduBGFfzuzeflu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_hzduBGFfzuzeflu: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb 
%zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_vFCorhCAmhdDCzm vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq 
$8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_vFCorhCAmhdDCzm: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_jzxBnczDBxGvzop .L_encrypt_big_nblocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae .L_16_blocks_overflow_tbpqxctvntvnomu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_tbpqxctvntvnomu .L_16_blocks_overflow_tbpqxctvntvnomu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_tbpqxctvntvnomu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_oaDubdDhvdaaGvl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_oaDubdDhvdaaGvl .L_16_blocks_overflow_oaDubdDhvdaaGvl: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_oaDubdDhvdaaGvl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 
256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_BEBEkieDehCjfpg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_BEBEkieDehCjfpg .L_16_blocks_overflow_BEBEkieDehCjfpg: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_BEBEkieDehCjfpg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_jzxBnczDBxGvzop .L_no_more_big_nblocks_jzxBnczDBxGvzop: cmpq $512,%r8 jae .L_encrypt_32_blocks_jzxBnczDBxGvzop cmpq $256,%r8 jae .L_encrypt_16_blocks_jzxBnczDBxGvzop .L_encrypt_0_blocks_ghash_32_jzxBnczDBxGvzop: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 
832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_BsvdFlCzDbBougk cmpl $8,%r10d je .L_last_num_blocks_is_8_BsvdFlCzDbBougk jb .L_last_num_blocks_is_7_1_BsvdFlCzDbBougk cmpl $12,%r10d je .L_last_num_blocks_is_12_BsvdFlCzDbBougk jb .L_last_num_blocks_is_11_9_BsvdFlCzDbBougk cmpl $15,%r10d je .L_last_num_blocks_is_15_BsvdFlCzDbBougk ja .L_last_num_blocks_is_16_BsvdFlCzDbBougk cmpl $14,%r10d je .L_last_num_blocks_is_14_BsvdFlCzDbBougk jmp .L_last_num_blocks_is_13_BsvdFlCzDbBougk .L_last_num_blocks_is_11_9_BsvdFlCzDbBougk: cmpl $10,%r10d je .L_last_num_blocks_is_10_BsvdFlCzDbBougk ja .L_last_num_blocks_is_11_BsvdFlCzDbBougk jmp .L_last_num_blocks_is_9_BsvdFlCzDbBougk .L_last_num_blocks_is_7_1_BsvdFlCzDbBougk: cmpl $4,%r10d je .L_last_num_blocks_is_4_BsvdFlCzDbBougk jb .L_last_num_blocks_is_3_1_BsvdFlCzDbBougk cmpl $6,%r10d ja .L_last_num_blocks_is_7_BsvdFlCzDbBougk je .L_last_num_blocks_is_6_BsvdFlCzDbBougk jmp .L_last_num_blocks_is_5_BsvdFlCzDbBougk .L_last_num_blocks_is_3_1_BsvdFlCzDbBougk: cmpl $2,%r10d ja .L_last_num_blocks_is_3_BsvdFlCzDbBougk je .L_last_num_blocks_is_2_BsvdFlCzDbBougk .L_last_num_blocks_is_1_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_czjqmrcuGbkhjtu vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_czjqmrcuGbkhjtu .L_16_blocks_overflow_czjqmrcuGbkhjtu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_czjqmrcuGbkhjtu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 
112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_kFnqfsluDrycrwr subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kFnqfsluDrycrwr .L_small_initial_partial_block_kFnqfsluDrycrwr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_kFnqfsluDrycrwr .L_small_initial_compute_done_kFnqfsluDrycrwr: .L_after_reduction_kFnqfsluDrycrwr: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_2_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_tCDuaqxntEtBCqr vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_tCDuaqxntEtBCqr .L_16_blocks_overflow_tCDuaqxntEtBCqr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_tCDuaqxntEtBCqr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 
98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nrarlmFvApvbzxy subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nrarlmFvApvbzxy .L_small_initial_partial_block_nrarlmFvApvbzxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nrarlmFvApvbzxy: orq %r8,%r8 je 
.L_after_reduction_nrarlmFvApvbzxy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nrarlmFvApvbzxy: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_3_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_AxfvkflbDBEFEmp vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_AxfvkflbDBEFEmp .L_16_blocks_overflow_AxfvkflbDBEFEmp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_AxfvkflbDBEFEmp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wasBAmmrjbGbemo subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wasBAmmrjbGbemo .L_small_initial_partial_block_wasBAmmrjbGbemo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wasBAmmrjbGbemo: orq %r8,%r8 je .L_after_reduction_wasBAmmrjbGbemo vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wasBAmmrjbGbemo: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_4_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_xkpgotEfuidCEnC vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_xkpgotEfuidCEnC .L_16_blocks_overflow_xkpgotEfuidCEnC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_xkpgotEfuidCEnC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq 
%r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DvcssyjwzrqmFlE subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DvcssyjwzrqmFlE .L_small_initial_partial_block_DvcssyjwzrqmFlE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DvcssyjwzrqmFlE: orq %r8,%r8 je .L_after_reduction_DvcssyjwzrqmFlE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DvcssyjwzrqmFlE: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_5_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_pxAyyxhuewraobh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_pxAyyxhuewraobh .L_16_blocks_overflow_pxAyyxhuewraobh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_pxAyyxhuewraobh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 
1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Eawjwfemrjotopq subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Eawjwfemrjotopq .L_small_initial_partial_block_Eawjwfemrjotopq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Eawjwfemrjotopq: orq %r8,%r8 je .L_after_reduction_Eawjwfemrjotopq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Eawjwfemrjotopq: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_6_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_rlBkdasaFkzjByu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_rlBkdasaFkzjByu .L_16_blocks_overflow_rlBkdasaFkzjByu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_rlBkdasaFkzjByu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb 
%zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AesejBFGrhphEgi subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AesejBFGrhphEgi .L_small_initial_partial_block_AesejBFGrhphEgi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AesejBFGrhphEgi: orq %r8,%r8 je .L_after_reduction_AesejBFGrhphEgi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AesejBFGrhphEgi: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_7_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_keqkskoubnuElfA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_keqkskoubnuElfA .L_16_blocks_overflow_keqkskoubnuElfA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_keqkskoubnuElfA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
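// Last-7-blocks tail (continued): the .byte sequences below are EVEX-encoded vaesenc/vpclmulqdq,
// interleaving the remaining AES rounds with GHASH partial products over the blocks buffered on the stack.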
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BxDABaeeqkhilCj subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BxDABaeeqkhilCj .L_small_initial_partial_block_BxDABaeeqkhilCj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BxDABaeeqkhilCj: orq %r8,%r8 je .L_after_reduction_BxDABaeeqkhilCj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BxDABaeeqkhilCj: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_8_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_hwCFDDlqwBqrdyx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_hwCFDDlqwBqrdyx .L_16_blocks_overflow_hwCFDDlqwBqrdyx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_hwCFDDlqwBqrdyx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BeuuFnmEliqBmCs subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BeuuFnmEliqBmCs .L_small_initial_partial_block_BeuuFnmEliqBmCs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BeuuFnmEliqBmCs: orq %r8,%r8 je .L_after_reduction_BeuuFnmEliqBmCs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BeuuFnmEliqBmCs: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_9_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_ybEEnfpGmbdDyaC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_ybEEnfpGmbdDyaC 
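// Last-9-blocks tail: if adding the block count would overflow the counter's low byte, the path below
// byte-swaps the counter blocks, adds the ddq_add_* constants, and swaps back before the AES rounds.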
.L_16_blocks_overflow_ybEEnfpGmbdDyaC:
vpshufb %zmm29,%zmm2,%zmm2
vpaddd ddq_add_1234(%rip),%zmm2,%zmm0
vmovdqa64 ddq_add_4444(%rip),%zmm5
vpaddd %zmm5,%zmm0,%zmm3
vpaddd %zmm5,%zmm3,%zmm4
vpshufb %zmm29,%zmm0,%zmm0
vpshufb %zmm29,%zmm3,%zmm3
vpshufb %xmm29,%xmm4,%xmm4
.L_16_blocks_ok_ybEEnfpGmbdDyaC:
vbroadcastf64x2 0(%rdi),%zmm30
vmovdqa64 1024(%rsp),%zmm8
vmovdqu64 0(%rsp,%rbx,1),%zmm1
vextracti32x4 $0,%zmm4,%xmm2
vshufi64x2 $0,%zmm2,%zmm2,%zmm2
vbroadcastf64x2 16(%rdi),%zmm31
vmovdqu64 64(%rsp,%rbx,1),%zmm18
vmovdqa64 1088(%rsp),%zmm22
vpxorq %zmm30,%zmm0,%zmm0
vpxorq %zmm30,%zmm3,%zmm3
vpxorq %xmm30,%xmm4,%xmm4
vbroadcastf64x2 32(%rdi),%zmm30
.byte 98,115,61,72,68,241,17
.byte 98,243,61,72,68,249,0
.byte 98,115,61,72,68,209,1
.byte 98,115,61,72,68,217,16
vmovdqu64 128(%rsp,%rbx,1),%zmm1
vmovdqa64 1152(%rsp),%zmm8
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
vbroadcastf64x2 48(%rdi),%zmm31
.byte 98,51,77,64,68,250,16
.byte 98,163,77,64,68,194,1
.byte 98,51,77,64,68,226,17
.byte 98,51,77,64,68,234,0
vmovdqu64 192(%rsp,%rbx,1),%zmm18
vmovdqa64 1216(%rsp),%zmm22
.byte 98,146,125,72,220,198
.byte 98,146,101,72,220,222
.byte 98,146,93,8,220,230
vbroadcastf64x2 64(%rdi),%zmm30
.byte 98,227,61,72,68,225,16
.byte 98,227,61,72,68,233,1
.byte 98,227,61,72,68,201,17
.byte 98,227,61,72,68,217,0
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
vbroadcastf64x2 80(%rdi),%zmm31
vpternlogq $0x96,%zmm17,%zmm12,%zmm14
vpternlogq $0x96,%zmm19,%zmm13,%zmm7
vpternlogq $0x96,%zmm21,%zmm16,%zmm11
vpternlogq $0x96,%zmm20,%zmm15,%zmm10
.byte 98,146,125,72,220,198
.byte 98,146,101,72,220,222
.byte 98,146,93,8,220,230
vbroadcastf64x2 96(%rdi),%zmm30
vmovdqu8 0(%rcx,%rax,1),%zmm17
vmovdqu8 64(%rcx,%rax,1),%zmm19
vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z}
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
vbroadcastf64x2 112(%rdi),%zmm31
.byte 98,51,77,64,68,250,16
.byte 98,163,77,64,68,194,1
.byte 98,51,77,64,68,226,17
.byte 98,51,77,64,68,234,0
.byte 98,146,125,72,220,198
.byte 98,146,101,72,220,222
.byte 98,146,93,8,220,230
vbroadcastf64x2 128(%rdi),%zmm30
vpternlogq $0x96,%zmm16,%zmm11,%zmm10
vpternlogq $0x96,%zmm12,%zmm14,%zmm24
vpternlogq $0x96,%zmm13,%zmm7,%zmm25
vpternlogq $0x96,%zmm15,%zmm10,%zmm26
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
vbroadcastf64x2 144(%rdi),%zmm31
.byte 98,146,125,72,220,198
.byte 98,146,101,72,220,222
.byte 98,146,93,8,220,230
vbroadcastf64x2 160(%rdi),%zmm30
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
vbroadcastf64x2 176(%rdi),%zmm31
.byte 98,146,125,72,220,198
.byte 98,146,101,72,220,222
.byte 98,146,93,8,220,230
vbroadcastf64x2 192(%rdi),%zmm30
.byte 98,146,125,72,220,199
.byte 98,146,101,72,220,223
.byte 98,146,93,8,220,231
.byte 98,146,125,72,221,198
.byte 98,146,101,72,221,222
.byte 98,146,93,8,221,230
vpxorq %zmm17,%zmm0,%zmm0
vpxorq %zmm19,%zmm3,%zmm3
vpxorq %xmm20,%xmm4,%xmm4
vextracti32x4 $0,%zmm4,%xmm11
movq %r9,%r10
vmovdqu8 %zmm0,0(%r10,%rax,1)
vmovdqu8 %zmm3,64(%r10,%rax,1)
vmovdqu8 %xmm4,128(%r10,%rax,1){%k1}
vmovdqu8 %zmm4,%zmm4{%k1}{z}
vpshufb %zmm29,%zmm0,%zmm17
vpshufb %zmm29,%zmm3,%zmm19
vpshufb %xmm29,%xmm4,%xmm20
vextracti32x4 $0,%zmm20,%xmm7
leaq 80(%rsi),%r10
subq $16 * (9 - 1),%r8
cmpq $16,%r8
jl .L_small_initial_partial_block_bDrrnAatcuCrjCa
subq $16,%r8
movl $0,(%rdx)
vmovdqu64 112(%r10),%zmm1
.byte 98,243,117,64,68,193,17
.byte 98,243,117,64,68,217,0
.byte 98,243,117,64,68,225,1
.byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bDrrnAatcuCrjCa .L_small_initial_partial_block_bDrrnAatcuCrjCa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bDrrnAatcuCrjCa: orq %r8,%r8 je .L_after_reduction_bDrrnAatcuCrjCa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bDrrnAatcuCrjCa: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_10_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_opfbCaznAiAepnv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_opfbCaznAiAepnv .L_16_blocks_overflow_opfbCaznAiAepnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_opfbCaznAiAepnv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CsmvpucAbBEBcvl subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
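// Fold the 512-bit GHASH accumulators down to 128 bits, then reduce modulo the GHASH polynomial
// using the POLY2 constant.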
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CsmvpucAbBEBcvl .L_small_initial_partial_block_CsmvpucAbBEBcvl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CsmvpucAbBEBcvl: orq %r8,%r8 je .L_after_reduction_CsmvpucAbBEBcvl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CsmvpucAbBEBcvl: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_11_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_qxFolltldGnscDg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_qxFolltldGnscDg .L_16_blocks_overflow_qxFolltldGnscDg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_qxFolltldGnscDg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AaGweewAhEribny subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AaGweewAhEribny .L_small_initial_partial_block_AaGweewAhEribny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AaGweewAhEribny: orq %r8,%r8 je .L_after_reduction_AaGweewAhEribny vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AaGweewAhEribny: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_12_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_nvmdGffBdmtukpe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_nvmdGffBdmtukpe .L_16_blocks_overflow_nvmdGffBdmtukpe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_nvmdGffBdmtukpe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FoabkbEhqjtqagB subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FoabkbEhqjtqagB .L_small_initial_partial_block_FoabkbEhqjtqagB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq 
%zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FoabkbEhqjtqagB: orq %r8,%r8 je .L_after_reduction_FoabkbEhqjtqagB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FoabkbEhqjtqagB: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_13_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_zGEqEwwbyegFygC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_zGEqEwwbyegFygC .L_16_blocks_overflow_zGEqEwwbyegFygC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_zGEqEwwbyegFygC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 
192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gjvieAerDfDGsxy subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gjvieAerDfDGsxy .L_small_initial_partial_block_gjvieAerDfDGsxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 
.byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gjvieAerDfDGsxy: orq %r8,%r8 je .L_after_reduction_gjvieAerDfDGsxy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gjvieAerDfDGsxy: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_14_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_hGfdBnfArvqgnDo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_hGfdBnfArvqgnDo .L_16_blocks_overflow_hGfdBnfArvqgnDo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_hGfdBnfArvqgnDo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_faDbEijoauEqsyG subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_faDbEijoauEqsyG .L_small_initial_partial_block_faDbEijoauEqsyG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_faDbEijoauEqsyG: orq %r8,%r8 je .L_after_reduction_faDbEijoauEqsyG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_faDbEijoauEqsyG: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_15_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_AhbxhfFAjAuyeFk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_AhbxhfFAjAuyeFk .L_16_blocks_overflow_AhbxhfFAjAuyeFk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_AhbxhfFAjAuyeFk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 
64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sxxFbklDpjCfEvm subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq 
$8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sxxFbklDpjCfEvm .L_small_initial_partial_block_sxxFbklDpjCfEvm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sxxFbklDpjCfEvm: orq %r8,%r8 je .L_after_reduction_sxxFbklDpjCfEvm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sxxFbklDpjCfEvm: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_16_BsvdFlCzDbBougk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_njybzcioxuyaaaD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_njybzcioxuyaaaD .L_16_blocks_overflow_njybzcioxuyaaaD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_njybzcioxuyaaaD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 
128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_bCffyflcoaBxCzy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq 
$0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bCffyflcoaBxCzy: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bCffyflcoaBxCzy: jmp .L_last_blocks_done_BsvdFlCzDbBougk .L_last_num_blocks_is_0_BsvdFlCzDbBougk: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_BsvdFlCzDbBougk: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_jzxBnczDBxGvzop .L_encrypt_32_blocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae .L_16_blocks_overflow_wafuliacDuosCms vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_wafuliacDuosCms .L_16_blocks_overflow_wafuliacDuosCms: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_wafuliacDuosCms: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 
768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_kAejpmvyzczzucF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp 
.L_16_blocks_ok_kAejpmvyzczzucF .L_16_blocks_overflow_kAejpmvyzczzucF: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_kAejpmvyzczzucF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 
%zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_vkvGtsckpeodFyd cmpl $8,%r10d je .L_last_num_blocks_is_8_vkvGtsckpeodFyd jb .L_last_num_blocks_is_7_1_vkvGtsckpeodFyd cmpl $12,%r10d je .L_last_num_blocks_is_12_vkvGtsckpeodFyd jb .L_last_num_blocks_is_11_9_vkvGtsckpeodFyd cmpl $15,%r10d je .L_last_num_blocks_is_15_vkvGtsckpeodFyd ja .L_last_num_blocks_is_16_vkvGtsckpeodFyd cmpl $14,%r10d je .L_last_num_blocks_is_14_vkvGtsckpeodFyd jmp .L_last_num_blocks_is_13_vkvGtsckpeodFyd .L_last_num_blocks_is_11_9_vkvGtsckpeodFyd: cmpl $10,%r10d je .L_last_num_blocks_is_10_vkvGtsckpeodFyd ja .L_last_num_blocks_is_11_vkvGtsckpeodFyd jmp .L_last_num_blocks_is_9_vkvGtsckpeodFyd .L_last_num_blocks_is_7_1_vkvGtsckpeodFyd: cmpl $4,%r10d je .L_last_num_blocks_is_4_vkvGtsckpeodFyd jb .L_last_num_blocks_is_3_1_vkvGtsckpeodFyd cmpl $6,%r10d ja .L_last_num_blocks_is_7_vkvGtsckpeodFyd je .L_last_num_blocks_is_6_vkvGtsckpeodFyd jmp .L_last_num_blocks_is_5_vkvGtsckpeodFyd .L_last_num_blocks_is_3_1_vkvGtsckpeodFyd: cmpl $2,%r10d ja .L_last_num_blocks_is_3_vkvGtsckpeodFyd je .L_last_num_blocks_is_2_vkvGtsckpeodFyd .L_last_num_blocks_is_1_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_BvAqyjatyidEnnt vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_BvAqyjatyidEnnt .L_16_blocks_overflow_BvAqyjatyidEnnt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_BvAqyjatyidEnnt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 
vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_gBeshkmzGvkmrAi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gBeshkmzGvkmrAi .L_small_initial_partial_block_gBeshkmzGvkmrAi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_gBeshkmzGvkmrAi .L_small_initial_compute_done_gBeshkmzGvkmrAi: .L_after_reduction_gBeshkmzGvkmrAi: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_2_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_nbawutokAutAqum vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_nbawutokAutAqum .L_16_blocks_overflow_nbawutokAutAqum: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_nbawutokAutAqum: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BlGfnlBkldmmFcw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BlGfnlBkldmmFcw .L_small_initial_partial_block_BlGfnlBkldmmFcw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BlGfnlBkldmmFcw: orq %r8,%r8 je .L_after_reduction_BlGfnlBkldmmFcw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BlGfnlBkldmmFcw: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_3_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_CwkxGelBrtqaaxv vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_CwkxGelBrtqaaxv .L_16_blocks_overflow_CwkxGelBrtqaaxv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_CwkxGelBrtqaaxv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EEhEwlabesmvDev subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EEhEwlabesmvDev .L_small_initial_partial_block_EEhEwlabesmvDev: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EEhEwlabesmvDev: orq %r8,%r8 je .L_after_reduction_EEhEwlabesmvDev vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EEhEwlabesmvDev: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_4_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_gFpynBlybCeGalG vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_gFpynBlybCeGalG .L_16_blocks_overflow_gFpynBlybCeGalG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_gFpynBlybCeGalG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 
98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gFxpzjaswtGGooa subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gFxpzjaswtGGooa .L_small_initial_partial_block_gFxpzjaswtGGooa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gFxpzjaswtGGooa: orq %r8,%r8 je .L_after_reduction_gFxpzjaswtGGooa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gFxpzjaswtGGooa: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_5_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_xwErcCwicbEwFqC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp 
.L_16_blocks_ok_xwErcCwicbEwFqC .L_16_blocks_overflow_xwErcCwicbEwFqC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_xwErcCwicbEwFqC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GaEkADDkkdyyuqC subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 
vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GaEkADDkkdyyuqC .L_small_initial_partial_block_GaEkADDkkdyyuqC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GaEkADDkkdyyuqC: orq %r8,%r8 je .L_after_reduction_GaEkADDkkdyyuqC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GaEkADDkkdyyuqC: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_6_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_baDecrAptncCCuf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_baDecrAptncCCuf .L_16_blocks_overflow_baDecrAptncCCuf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_baDecrAptncCCuf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 
.byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GuszoBBsEjlucdt subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GuszoBBsEjlucdt .L_small_initial_partial_block_GuszoBBsEjlucdt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GuszoBBsEjlucdt: orq %r8,%r8 je .L_after_reduction_GuszoBBsEjlucdt vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GuszoBBsEjlucdt: jmp .L_last_blocks_done_vkvGtsckpeodFyd 
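/* The .L_last_num_blocks_is_N_* handlers below appear to share one template
   with the cases above: build the last N big-endian counter blocks (taking
   the *_overflow_* path when the low counter byte is about to wrap), run the
   AES rounds that are emitted as raw EVEX .byte sequences (vaesenc /
   vaesenclast), mask the trailing partial block with an entry from
   byte64_len_to_mask_table, store the ciphertext, then fold the byte-swapped
   ciphertext into the GHASH state with vpclmulqdq and reduce it using the
   constants at POLY2. */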
.L_last_num_blocks_is_7_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_iltrljarpeDchus vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_iltrljarpeDchus .L_16_blocks_overflow_iltrljarpeDchus: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_iltrljarpeDchus: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iGfglGojAckhaEr subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq 
$0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iGfglGojAckhaEr .L_small_initial_partial_block_iGfglGojAckhaEr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iGfglGojAckhaEr: orq %r8,%r8 je .L_after_reduction_iGfglGojAckhaEr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iGfglGojAckhaEr: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_8_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_eyzjCojxduufqEi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_eyzjCojxduufqEi .L_16_blocks_overflow_eyzjCojxduufqEi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_eyzjCojxduufqEi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hczvcmipanjdewG subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hczvcmipanjdewG .L_small_initial_partial_block_hczvcmipanjdewG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hczvcmipanjdewG: orq %r8,%r8 je .L_after_reduction_hczvcmipanjdewG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hczvcmipanjdewG: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_9_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_bwdCwgCmnErFeDe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_bwdCwgCmnErFeDe .L_16_blocks_overflow_bwdCwgCmnErFeDe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_bwdCwgCmnErFeDe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nczsEBrGqvtCBoe subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nczsEBrGqvtCBoe .L_small_initial_partial_block_nczsEBrGqvtCBoe: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nczsEBrGqvtCBoe: orq %r8,%r8 je .L_after_reduction_nczsEBrGqvtCBoe vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nczsEBrGqvtCBoe: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_10_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 
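/* %r11 seems to hold the remaining byte count minus 128 at this point; the
   8-byte entries of byte64_len_to_mask_table presumably convert that residual
   length into the %k1 byte mask used for the final sub-64-byte load/store. */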
kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_eGGpBsfFnpwwbub vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_eGGpBsfFnpwwbub .L_16_blocks_overflow_eGGpBsfFnpwwbub: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_eGGpBsfFnpwwbub: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_EcwCefEtlqcfEms subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EcwCefEtlqcfEms .L_small_initial_partial_block_EcwCefEtlqcfEms: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EcwCefEtlqcfEms: orq %r8,%r8 je .L_after_reduction_EcwCefEtlqcfEms vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EcwCefEtlqcfEms: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_11_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_eddhoEuAgjbBjFF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_eddhoEuAgjbBjFF .L_16_blocks_overflow_eddhoEuAgjbBjFF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_eddhoEuAgjbBjFF: 
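/* From this label the 11-block tail appears to mirror the other handlers:
   the AES round keys are broadcast from 0(%rdi) through 192(%rdi), the
   counter blocks are whitened and pushed through the raw-encoded vaesenc /
   vaesenclast rounds while vpclmulqdq accumulates GHASH over earlier blocks
   saved on the stack, and the last partial 64-byte chunk is loaded and
   stored under the %k1 mask. */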
vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wytgrCdaysqdDEF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wytgrCdaysqdDEF .L_small_initial_partial_block_wytgrCdaysqdDEF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wytgrCdaysqdDEF: orq %r8,%r8 je .L_after_reduction_wytgrCdaysqdDEF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wytgrCdaysqdDEF: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_12_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_bfsFAnmADrmmioq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_bfsFAnmADrmmioq .L_16_blocks_overflow_bfsFAnmADrmmioq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_bfsFAnmADrmmioq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pzClwApspseFxiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 
vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pzClwApspseFxiy .L_small_initial_partial_block_pzClwApspseFxiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pzClwApspseFxiy: orq %r8,%r8 je .L_after_reduction_pzClwApspseFxiy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pzClwApspseFxiy: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_13_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_eqddxBoxqiwCsny vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_eqddxBoxqiwCsny .L_16_blocks_overflow_eqddxBoxqiwCsny: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_eqddxBoxqiwCsny: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 
.byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jtCktBigdCvArrs subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 
vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jtCktBigdCvArrs .L_small_initial_partial_block_jtCktBigdCvArrs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jtCktBigdCvArrs: orq %r8,%r8 je .L_after_reduction_jtCktBigdCvArrs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jtCktBigdCvArrs: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_14_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_DAGxccpeauyqpCa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_DAGxccpeauyqpCa .L_16_blocks_overflow_DAGxccpeauyqpCa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_DAGxccpeauyqpCa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_eekywuGEAhgthae subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 
98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_eekywuGEAhgthae .L_small_initial_partial_block_eekywuGEAhgthae: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_eekywuGEAhgthae: orq %r8,%r8 je .L_after_reduction_eekywuGEAhgthae vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_eekywuGEAhgthae: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_15_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_xrzdkvEbdpatlsn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_xrzdkvEbdpatlsn .L_16_blocks_overflow_xrzdkvEbdpatlsn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_xrzdkvEbdpatlsn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nvxEscrdCznvhGj subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nvxEscrdCznvhGj .L_small_initial_partial_block_nvxEscrdCznvhGj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nvxEscrdCznvhGj: orq %r8,%r8 je .L_after_reduction_nvxEscrdCznvhGj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nvxEscrdCznvhGj: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_16_vkvGtsckpeodFyd: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_nhkzxmwsyGuskoi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_nhkzxmwsyGuskoi .L_16_blocks_overflow_nhkzxmwsyGuskoi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 
vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_nhkzxmwsyGuskoi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb 
%zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_ECtspjaqpoxwhnx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ECtspjaqpoxwhnx: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ECtspjaqpoxwhnx: jmp .L_last_blocks_done_vkvGtsckpeodFyd .L_last_num_blocks_is_0_vkvGtsckpeodFyd: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_vkvGtsckpeodFyd: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_jzxBnczDBxGvzop 
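/* Main 16-block path (a descriptive annotation; register roles inferred from the
   surrounding code): %r15b tracks the low counter byte, and the overflow branch
   byte-reflects the counter, advances it via the ddq_add_1234/ddq_add_4444 tables,
   and reflects it back before use. Sixteen AES-CTR blocks are then encrypted --
   the .byte 98,... sequences encode EVEX vaesenc/vaesenclast and vpclmulqdq forms --
   interleaved with GHASH accumulation over hash-key powers and the previous pass's
   byte-reflected ciphertext kept on the stack. The 256 bytes of ciphertext are
   written out, stashed (reflected) at 1280(%rsp) and up for the next GHASH pass,
   and the remaining length is dispatched through the .L_last_num_blocks_is_* ladder. */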
.L_encrypt_16_blocks_jzxBnczDBxGvzop: cmpb $240,%r15b jae .L_16_blocks_overflow_kkhtsxadreytpgc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_kkhtsxadreytpgc .L_16_blocks_overflow_kkhtsxadreytpgc: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_kkhtsxadreytpgc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CGhwyqzCxCrDAod cmpl $8,%r10d je .L_last_num_blocks_is_8_CGhwyqzCxCrDAod jb .L_last_num_blocks_is_7_1_CGhwyqzCxCrDAod cmpl $12,%r10d je .L_last_num_blocks_is_12_CGhwyqzCxCrDAod jb .L_last_num_blocks_is_11_9_CGhwyqzCxCrDAod cmpl $15,%r10d je .L_last_num_blocks_is_15_CGhwyqzCxCrDAod ja .L_last_num_blocks_is_16_CGhwyqzCxCrDAod cmpl $14,%r10d je .L_last_num_blocks_is_14_CGhwyqzCxCrDAod jmp .L_last_num_blocks_is_13_CGhwyqzCxCrDAod .L_last_num_blocks_is_11_9_CGhwyqzCxCrDAod: cmpl $10,%r10d je .L_last_num_blocks_is_10_CGhwyqzCxCrDAod ja .L_last_num_blocks_is_11_CGhwyqzCxCrDAod jmp .L_last_num_blocks_is_9_CGhwyqzCxCrDAod .L_last_num_blocks_is_7_1_CGhwyqzCxCrDAod: cmpl $4,%r10d je .L_last_num_blocks_is_4_CGhwyqzCxCrDAod jb .L_last_num_blocks_is_3_1_CGhwyqzCxCrDAod cmpl $6,%r10d ja .L_last_num_blocks_is_7_CGhwyqzCxCrDAod je .L_last_num_blocks_is_6_CGhwyqzCxCrDAod jmp .L_last_num_blocks_is_5_CGhwyqzCxCrDAod .L_last_num_blocks_is_3_1_CGhwyqzCxCrDAod: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CGhwyqzCxCrDAod je .L_last_num_blocks_is_2_CGhwyqzCxCrDAod .L_last_num_blocks_is_1_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_ycAFtgAvrzFpmud vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_ycAFtgAvrzFpmud .L_16_blocks_overflow_ycAFtgAvrzFpmud: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_ycAFtgAvrzFpmud: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_CbqAGqoFBCoBcnn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CbqAGqoFBCoBcnn .L_small_initial_partial_block_CbqAGqoFBCoBcnn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_CbqAGqoFBCoBcnn .L_small_initial_compute_done_CbqAGqoFBCoBcnn: .L_after_reduction_CbqAGqoFBCoBcnn: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_2_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_unaFqvbBnCelmgG vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_unaFqvbBnCelmgG .L_16_blocks_overflow_unaFqvbBnCelmgG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_unaFqvbBnCelmgG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gzvpemiEleCjEbC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gzvpemiEleCjEbC .L_small_initial_partial_block_gzvpemiEleCjEbC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gzvpemiEleCjEbC: orq %r8,%r8 je .L_after_reduction_gzvpemiEleCjEbC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gzvpemiEleCjEbC: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_3_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_FzufylrxyerzBEy vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_FzufylrxyerzBEy .L_16_blocks_overflow_FzufylrxyerzBEy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_FzufylrxyerzBEy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 
vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gqwjyzltkrfhGvo subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gqwjyzltkrfhGvo .L_small_initial_partial_block_gqwjyzltkrfhGvo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gqwjyzltkrfhGvo: orq %r8,%r8 je .L_after_reduction_gqwjyzltkrfhGvo vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gqwjyzltkrfhGvo: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_4_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_FtupvahihsnvuAd vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_FtupvahihsnvuAd .L_16_blocks_overflow_FtupvahihsnvuAd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_FtupvahihsnvuAd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 
vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wBowoFhurirchGq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wBowoFhurirchGq .L_small_initial_partial_block_wBowoFhurirchGq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wBowoFhurirchGq: orq %r8,%r8 je .L_after_reduction_wBowoFhurirchGq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wBowoFhurirchGq: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_5_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_uBhGhomDazsjBak vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_uBhGhomDazsjBak .L_16_blocks_overflow_uBhGhomDazsjBak: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_uBhGhomDazsjBak: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 
$1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_eFbGprqpsBhvBkh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_eFbGprqpsBhvBkh .L_small_initial_partial_block_eFbGprqpsBhvBkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_eFbGprqpsBhvBkh: orq %r8,%r8 je .L_after_reduction_eFbGprqpsBhvBkh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_eFbGprqpsBhvBkh: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_6_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_mBfhrGpovoncBkc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_mBfhrGpovoncBkc .L_16_blocks_overflow_mBfhrGpovoncBkc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_mBfhrGpovoncBkc: 
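/* Tail path for six remaining blocks (annotation; operand roles inferred from the
   surrounding code): four counter blocks in %zmm0 and two in %ymm3 are encrypted,
   with the final chunk loaded and stored under the %k1 byte mask so a short last
   block is handled in place, while the GHASH of the stashed ciphertext from the
   previous pass is folded in with vpclmulqdq and reduced modulo the GCM polynomial
   (POLY2); the leftover length in %r8 then selects the full-block or partial-block
   bookkeeping below. */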
vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qFilbDGEygcyzzw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 
vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qFilbDGEygcyzzw .L_small_initial_partial_block_qFilbDGEygcyzzw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qFilbDGEygcyzzw: orq %r8,%r8 je .L_after_reduction_qFilbDGEygcyzzw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qFilbDGEygcyzzw: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_7_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_FvpewBABrfyByvd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_FvpewBABrfyByvd .L_16_blocks_overflow_FvpewBABrfyByvd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_FvpewBABrfyByvd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vrmegiBFdzfFmfq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vrmegiBFdzfFmfq .L_small_initial_partial_block_vrmegiBFdzfFmfq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 
98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vrmegiBFdzfFmfq: orq %r8,%r8 je .L_after_reduction_vrmegiBFdzfFmfq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vrmegiBFdzfFmfq: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_8_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_FsoptjzAkrqyAAr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_FsoptjzAkrqyAAr .L_16_blocks_overflow_FsoptjzAkrqyAAr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_FsoptjzAkrqyAAr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq 
$8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vkFiBjCFtrykuwD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vkFiBjCFtrykuwD .L_small_initial_partial_block_vkFiBjCFtrykuwD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
.L_small_initial_compute_done_vkFiBjCFtrykuwD: orq %r8,%r8 je .L_after_reduction_vkFiBjCFtrykuwD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vkFiBjCFtrykuwD: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_9_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_iABBxfvotBEkECx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_iABBxfvotBEkECx .L_16_blocks_overflow_iABBxfvotBEkECx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_iABBxfvotBEkECx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq 
$8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ypbbgpxgCctCtxy subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ypbbgpxgCctCtxy .L_small_initial_partial_block_ypbbgpxgCctCtxy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ypbbgpxgCctCtxy: orq %r8,%r8 je .L_after_reduction_ypbbgpxgCctCtxy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ypbbgpxgCctCtxy: jmp 
.L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_10_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_jEngtqCkuniGdjp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_jEngtqCkuniGdjp .L_16_blocks_overflow_jEngtqCkuniGdjp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_jEngtqCkuniGdjp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nffhkznowjoDiCf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nffhkznowjoDiCf .L_small_initial_partial_block_nffhkznowjoDiCf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nffhkznowjoDiCf: orq %r8,%r8 je .L_after_reduction_nffhkznowjoDiCf 
vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nffhkznowjoDiCf: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_11_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_beoirgaAxslixji vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_beoirgaAxslixji .L_16_blocks_overflow_beoirgaAxslixji: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_beoirgaAxslixji: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cqzlemDcyGkhDnC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cqzlemDcyGkhDnC .L_small_initial_partial_block_cqzlemDcyGkhDnC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cqzlemDcyGkhDnC: orq %r8,%r8 je .L_after_reduction_cqzlemDcyGkhDnC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cqzlemDcyGkhDnC: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_12_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_sxrCycfBickEpCs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_sxrCycfBickEpCs .L_16_blocks_overflow_sxrCycfBickEpCs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_sxrCycfBickEpCs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lzBzlrbzBeACuhk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lzBzlrbzBeACuhk .L_small_initial_partial_block_lzBzlrbzBeACuhk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lzBzlrbzBeACuhk: orq %r8,%r8 je .L_after_reduction_lzBzlrbzBeACuhk vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lzBzlrbzBeACuhk: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_13_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_sesGGmqiCkypotq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_sesGGmqiCkypotq .L_16_blocks_overflow_sesGGmqiCkypotq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_sesGGmqiCkypotq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 
vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qihphhEmthsffzk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qihphhEmthsffzk .L_small_initial_partial_block_qihphhEmthsffzk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 
98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qihphhEmthsffzk: orq %r8,%r8 je .L_after_reduction_qihphhEmthsffzk vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qihphhEmthsffzk: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_14_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_jqifyxAoeoxkDuE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_jqifyxAoeoxkDuE .L_16_blocks_overflow_jqifyxAoeoxkDuE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_jqifyxAoeoxkDuE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 
.byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FkuwuuqBpnEvzkd subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FkuwuuqBpnEvzkd .L_small_initial_partial_block_FkuwuuqBpnEvzkd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FkuwuuqBpnEvzkd: orq %r8,%r8 je .L_after_reduction_FkuwuuqBpnEvzkd vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FkuwuuqBpnEvzkd: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_15_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_CBqhusrmEugbwks vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_CBqhusrmEugbwks .L_16_blocks_overflow_CBqhusrmEugbwks: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_CBqhusrmEugbwks: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qEmtvwDozjnABmp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 
98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qEmtvwDozjnABmp .L_small_initial_partial_block_qEmtvwDozjnABmp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qEmtvwDozjnABmp: orq %r8,%r8 je .L_after_reduction_qEmtvwDozjnABmp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qEmtvwDozjnABmp: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_16_CGhwyqzCxCrDAod: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_hDfCleGEdmpzBiw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_hDfCleGEdmpzBiw .L_16_blocks_overflow_hDfCleGEdmpzBiw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd 
%zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_hDfCleGEdmpzBiw: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 
vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_pybhdxzahdqcprl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pybhdxzahdqcprl: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pybhdxzahdqcprl: jmp .L_last_blocks_done_CGhwyqzCxCrDAod .L_last_num_blocks_is_0_CGhwyqzCxCrDAod: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 
$1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CGhwyqzCxCrDAod: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_jzxBnczDBxGvzop .L_message_below_32_blocks_jzxBnczDBxGvzop: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_dzmCrsBiciGnliE vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_dzmCrsBiciGnliE: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_aFwwehusEvmmDke cmpl $8,%r10d je .L_last_num_blocks_is_8_aFwwehusEvmmDke jb .L_last_num_blocks_is_7_1_aFwwehusEvmmDke cmpl $12,%r10d je .L_last_num_blocks_is_12_aFwwehusEvmmDke jb .L_last_num_blocks_is_11_9_aFwwehusEvmmDke cmpl $15,%r10d je .L_last_num_blocks_is_15_aFwwehusEvmmDke ja .L_last_num_blocks_is_16_aFwwehusEvmmDke cmpl $14,%r10d je .L_last_num_blocks_is_14_aFwwehusEvmmDke jmp .L_last_num_blocks_is_13_aFwwehusEvmmDke .L_last_num_blocks_is_11_9_aFwwehusEvmmDke: cmpl $10,%r10d je 
.L_last_num_blocks_is_10_aFwwehusEvmmDke ja .L_last_num_blocks_is_11_aFwwehusEvmmDke jmp .L_last_num_blocks_is_9_aFwwehusEvmmDke .L_last_num_blocks_is_7_1_aFwwehusEvmmDke: cmpl $4,%r10d je .L_last_num_blocks_is_4_aFwwehusEvmmDke jb .L_last_num_blocks_is_3_1_aFwwehusEvmmDke cmpl $6,%r10d ja .L_last_num_blocks_is_7_aFwwehusEvmmDke je .L_last_num_blocks_is_6_aFwwehusEvmmDke jmp .L_last_num_blocks_is_5_aFwwehusEvmmDke .L_last_num_blocks_is_3_1_aFwwehusEvmmDke: cmpl $2,%r10d ja .L_last_num_blocks_is_3_aFwwehusEvmmDke je .L_last_num_blocks_is_2_aFwwehusEvmmDke .L_last_num_blocks_is_1_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_BsFiEfmuvxGEGuk vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_BsFiEfmuvxGEGuk .L_16_blocks_overflow_BsFiEfmuvxGEGuk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_BsFiEfmuvxGEGuk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_zEujlpbgqDyCdvt subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zEujlpbgqDyCdvt .L_small_initial_partial_block_zEujlpbgqDyCdvt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_zEujlpbgqDyCdvt .L_small_initial_compute_done_zEujlpbgqDyCdvt: .L_after_reduction_zEujlpbgqDyCdvt: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_2_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_DrefbggoCuhFosm vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_DrefbggoCuhFosm .L_16_blocks_overflow_DrefbggoCuhFosm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_DrefbggoCuhFosm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq 
%ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rjkFEDDDoeuwufs subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rjkFEDDDoeuwufs .L_small_initial_partial_block_rjkFEDDDoeuwufs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rjkFEDDDoeuwufs: orq %r8,%r8 je .L_after_reduction_rjkFEDDDoeuwufs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rjkFEDDDoeuwufs: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_3_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_oskEeEmCEGeqECv vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_oskEeEmCEGeqECv .L_16_blocks_overflow_oskEeEmCEGeqECv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_oskEeEmCEGeqECv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sgeerDwthydzyuy subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sgeerDwthydzyuy .L_small_initial_partial_block_sgeerDwthydzyuy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sgeerDwthydzyuy: orq %r8,%r8 je .L_after_reduction_sgeerDwthydzyuy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sgeerDwthydzyuy: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_4_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_aAxBGtfyfEadAkB vpaddd %zmm28,%zmm2,%zmm0 jmp 
.L_16_blocks_ok_aAxBGtfyfEadAkB .L_16_blocks_overflow_aAxBGtfyfEadAkB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_aAxBGtfyfEadAkB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Dqjcrneuragvwkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Dqjcrneuragvwkw .L_small_initial_partial_block_Dqjcrneuragvwkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq 
%zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Dqjcrneuragvwkw: orq %r8,%r8 je .L_after_reduction_Dqjcrneuragvwkw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Dqjcrneuragvwkw: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_5_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_bpEikxmsheidfwq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_bpEikxmsheidfwq .L_16_blocks_overflow_bpEikxmsheidfwq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_bpEikxmsheidfwq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 
98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AGuqEbsAbinbrDm subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AGuqEbsAbinbrDm .L_small_initial_partial_block_AGuqEbsAbinbrDm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AGuqEbsAbinbrDm: orq %r8,%r8 je .L_after_reduction_AGuqEbsAbinbrDm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AGuqEbsAbinbrDm: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_6_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_otEmDDixbpFEmvy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_otEmDDixbpFEmvy .L_16_blocks_overflow_otEmDDixbpFEmvy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_otEmDDixbpFEmvy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FAvepDmDsogujha subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FAvepDmDsogujha .L_small_initial_partial_block_FAvepDmDsogujha: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FAvepDmDsogujha: orq %r8,%r8 je .L_after_reduction_FAvepDmDsogujha vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FAvepDmDsogujha: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_7_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_kEvFawDBkeclidj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_kEvFawDBkeclidj .L_16_blocks_overflow_kEvFawDBkeclidj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_kEvFawDBkeclidj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jyvbjxevpurblup subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jyvbjxevpurblup .L_small_initial_partial_block_jyvbjxevpurblup: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jyvbjxevpurblup: orq %r8,%r8 je .L_after_reduction_jyvbjxevpurblup vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jyvbjxevpurblup: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_8_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_nfBegzmtymkjkuE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_nfBegzmtymkjkuE .L_16_blocks_overflow_nfBegzmtymkjkuE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_nfBegzmtymkjkuE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ArBbnussymieuyl subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ArBbnussymieuyl .L_small_initial_partial_block_ArBbnussymieuyl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ArBbnussymieuyl: orq %r8,%r8 je .L_after_reduction_ArBbnussymieuyl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ArBbnussymieuyl: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_9_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_zjmfGFrkFzfxxez vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_zjmfGFrkFzfxxez .L_16_blocks_overflow_zjmfGFrkFzfxxez: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_zjmfGFrkFzfxxez: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CEuslsjdAFEouni subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CEuslsjdAFEouni .L_small_initial_partial_block_CEuslsjdAFEouni: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CEuslsjdAFEouni: orq %r8,%r8 je .L_after_reduction_CEuslsjdAFEouni vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CEuslsjdAFEouni: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_10_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_BvDkzdlGxbqBdwD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_BvDkzdlGxbqBdwD .L_16_blocks_overflow_BvDkzdlGxbqBdwD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_BvDkzdlGxbqBdwD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FEEAGeFDucwexEe subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FEEAGeFDucwexEe .L_small_initial_partial_block_FEEAGeFDucwexEe: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FEEAGeFDucwexEe: orq %r8,%r8 je .L_after_reduction_FEEAGeFDucwexEe vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FEEAGeFDucwexEe: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_11_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_wfjezxDvGpDnoFf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_wfjezxDvGpDnoFf .L_16_blocks_overflow_wfjezxDvGpDnoFf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_wfjezxDvGpDnoFf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
.byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qadlBuzdbwfpDef subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qadlBuzdbwfpDef .L_small_initial_partial_block_qadlBuzdbwfpDef: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qadlBuzdbwfpDef: orq %r8,%r8 je .L_after_reduction_qadlBuzdbwfpDef vpxorq %xmm7,%xmm14,%xmm14 
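/* Editor's note (assumption): the surrounding generated blocks appear to be the AVX-512/VAES AES-GCM tail handlers; each .L_last_num_blocks_is_N label encrypts the final N counter blocks, masks the trailing partial block with %k1, and folds the byte-swapped ciphertext into the GHASH accumulator in %xmm14 via VPCLMULQDQ (emitted here as raw .byte encodings) and the POLY2 reduction. */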
.L_after_reduction_qadlBuzdbwfpDef: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_12_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_pbckDbEtDdqavpn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_pbckDbEtDdqavpn .L_16_blocks_overflow_pbckDbEtDdqavpn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_pbckDbEtDdqavpn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 
%zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_osuccbBAbutpqse subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_osuccbBAbutpqse .L_small_initial_partial_block_osuccbBAbutpqse: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_osuccbBAbutpqse: orq %r8,%r8 je .L_after_reduction_osuccbBAbutpqse vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_osuccbBAbutpqse: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_13_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_oCotpBuspdAtjpe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_oCotpBuspdAtjpe .L_16_blocks_overflow_oCotpBuspdAtjpe: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_oCotpBuspdAtjpe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 
%xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mmuEdDpgoEjulrs subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mmuEdDpgoEjulrs .L_small_initial_partial_block_mmuEdDpgoEjulrs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mmuEdDpgoEjulrs: orq %r8,%r8 je .L_after_reduction_mmuEdDpgoEjulrs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mmuEdDpgoEjulrs: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_14_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae 
.L_16_blocks_overflow_bbvjuqrsjgdyCBn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_bbvjuqrsjgdyCBn .L_16_blocks_overflow_bbvjuqrsjgdyCBn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_bbvjuqrsjgdyCBn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_roGxlxzlgsulhzk subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_roGxlxzlgsulhzk .L_small_initial_partial_block_roGxlxzlgsulhzk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_roGxlxzlgsulhzk: orq %r8,%r8 je .L_after_reduction_roGxlxzlgsulhzk vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_roGxlxzlgsulhzk: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_15_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_GriwFAotfyoEekC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_GriwFAotfyoEekC .L_16_blocks_overflow_GriwFAotfyoEekC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_GriwFAotfyoEekC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_huBogwgwhfClyls subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_huBogwgwhfClyls .L_small_initial_partial_block_huBogwgwhfClyls: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_huBogwgwhfClyls: orq %r8,%r8 je .L_after_reduction_huBogwgwhfClyls vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_huBogwgwhfClyls: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_16_aFwwehusEvmmDke: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_sjAcjwAAtCgmwjr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_sjAcjwAAtCgmwjr .L_16_blocks_overflow_sjAcjwAAtCgmwjr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_sjAcjwAAtCgmwjr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_ayefrejzGqbkfya: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ayefrejzGqbkfya: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ayefrejzGqbkfya: jmp .L_last_blocks_done_aFwwehusEvmmDke .L_last_num_blocks_is_0_aFwwehusEvmmDke: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 
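/* Editor's note (assumption): this .L_last_num_blocks_is_0 path seems to perform GHASH only -- it multiplies the ciphertext blocks cached on the stack by the stored hash-key powers and folds the result into the running tag, since no counter blocks remain to be encrypted. */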
vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_aFwwehusEvmmDke: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_jzxBnczDBxGvzop .L_message_below_equal_16_blocks_jzxBnczDBxGvzop: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_wjgmgrFcljfrexe jl .L_small_initial_num_blocks_is_7_1_wjgmgrFcljfrexe cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_wjgmgrFcljfrexe jl .L_small_initial_num_blocks_is_11_9_wjgmgrFcljfrexe cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_wjgmgrFcljfrexe cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_wjgmgrFcljfrexe cmpq $14,%r12 je .L_small_initial_num_blocks_is_14_wjgmgrFcljfrexe jmp .L_small_initial_num_blocks_is_13_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_11_9_wjgmgrFcljfrexe: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_wjgmgrFcljfrexe cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_wjgmgrFcljfrexe jmp .L_small_initial_num_blocks_is_9_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_7_1_wjgmgrFcljfrexe: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_wjgmgrFcljfrexe jl .L_small_initial_num_blocks_is_3_1_wjgmgrFcljfrexe cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_wjgmgrFcljfrexe cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_wjgmgrFcljfrexe jmp .L_small_initial_num_blocks_is_5_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_3_1_wjgmgrFcljfrexe: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_wjgmgrFcljfrexe cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_1_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 
98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_FfndtjjjGEeCFEr subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FfndtjjjGEeCFEr .L_small_initial_partial_block_FfndtjjjGEeCFEr: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_FfndtjjjGEeCFEr .L_small_initial_compute_done_FfndtjjjGEeCFEr: .L_after_reduction_FfndtjjjGEeCFEr: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_2_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EnhukCdygAFrqou subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EnhukCdygAFrqou .L_small_initial_partial_block_EnhukCdygAFrqou: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EnhukCdygAFrqou: orq %r8,%r8 je .L_after_reduction_EnhukCdygAFrqou vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_EnhukCdygAFrqou: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_3_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_impdlEsbGuAaott subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_impdlEsbGuAaott 
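/* Editor's note (assumption): the surrounding .L_small_initial_num_blocks_is_N branches appear to handle messages of at most 16 blocks in a single pass -- encrypt N counter blocks, mask off the trailing partial block with %k1, and either reduce the GHASH state immediately or defer the partial block (stashed at 16(%rsi)) for a later update. */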
.L_small_initial_partial_block_impdlEsbGuAaott: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_impdlEsbGuAaott: orq %r8,%r8 je .L_after_reduction_impdlEsbGuAaott vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_impdlEsbGuAaott: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_4_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rBqdjBpBxxfxpoF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rBqdjBpBxxfxpoF .L_small_initial_partial_block_rBqdjBpBxxfxpoF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 
98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rBqdjBpBxxfxpoF: orq %r8,%r8 je .L_after_reduction_rBqdjBpBxxfxpoF vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_rBqdjBpBxxfxpoF: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_5_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vuaskFEqawsiCsj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vuaskFEqawsiCsj .L_small_initial_partial_block_vuaskFEqawsiCsj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vuaskFEqawsiCsj: orq %r8,%r8 je .L_after_reduction_vuaskFEqawsiCsj vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_vuaskFEqawsiCsj: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_6_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dCffBvEqzkjcfvA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 
vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dCffBvEqzkjcfvA .L_small_initial_partial_block_dCffBvEqzkjcfvA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dCffBvEqzkjcfvA: orq %r8,%r8 je .L_after_reduction_dCffBvEqzkjcfvA vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_dCffBvEqzkjcfvA: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_7_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yEllnEiichfbFDc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yEllnEiichfbFDc .L_small_initial_partial_block_yEllnEiichfbFDc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yEllnEiichfbFDc: orq %r8,%r8 je .L_after_reduction_yEllnEiichfbFDc vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_yEllnEiichfbFDc: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_8_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 
vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vBdxtBrlzxbaFcc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vBdxtBrlzxbaFcc .L_small_initial_partial_block_vBdxtBrlzxbaFcc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 
vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vBdxtBrlzxbaFcc: orq %r8,%r8 je .L_after_reduction_vBdxtBrlzxbaFcc vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_vBdxtBrlzxbaFcc: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_9_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_scfvxdenebqCdyz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq 
$8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_scfvxdenebqCdyz .L_small_initial_partial_block_scfvxdenebqCdyz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_scfvxdenebqCdyz: orq %r8,%r8 je .L_after_reduction_scfvxdenebqCdyz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_scfvxdenebqCdyz: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_10_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cvcjsgotzqiyevA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cvcjsgotzqiyevA .L_small_initial_partial_block_cvcjsgotzqiyevA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
.L_small_initial_compute_done_cvcjsgotzqiyevA: orq %r8,%r8 je .L_after_reduction_cvcjsgotzqiyevA vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_cvcjsgotzqiyevA: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_11_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vuCaGGnzBCpphtu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq 
%zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vuCaGGnzBCpphtu .L_small_initial_partial_block_vuCaGGnzBCpphtu: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vuCaGGnzBCpphtu: orq %r8,%r8 je .L_after_reduction_vuCaGGnzBCpphtu vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_vuCaGGnzBCpphtu: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_12_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 
vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qhFhudxmstaFEvA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qhFhudxmstaFEvA .L_small_initial_partial_block_qhFhudxmstaFEvA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
.byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qhFhudxmstaFEvA: orq %r8,%r8 je .L_after_reduction_qhFhudxmstaFEvA vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_qhFhudxmstaFEvA: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_13_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GbEgefaoCcDkbpn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 
98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GbEgefaoCcDkbpn .L_small_initial_partial_block_GbEgefaoCcDkbpn: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GbEgefaoCcDkbpn: orq %r8,%r8 je .L_after_reduction_GbEgefaoCcDkbpn vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_GbEgefaoCcDkbpn: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_14_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq 
%zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hCaaAkupwhFdkkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 
98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hCaaAkupwhFdkkk .L_small_initial_partial_block_hCaaAkupwhFdkkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hCaaAkupwhFdkkk: orq %r8,%r8 je .L_after_reduction_hCaaAkupwhFdkkk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_hCaaAkupwhFdkkk: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_15_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 
.byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kBjkymsezzduvxc subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kBjkymsezzduvxc .L_small_initial_partial_block_kBjkymsezzduvxc: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq 
$0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kBjkymsezzduvxc: orq %r8,%r8 je .L_after_reduction_kBjkymsezzduvxc vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_kBjkymsezzduvxc: jmp .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe .L_small_initial_num_blocks_is_16_wjgmgrFcljfrexe: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 
vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_qxCpdapFxyCuqwj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qxCpdapFxyCuqwj: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_qxCpdapFxyCuqwj: .L_small_initial_blocks_encrypted_wjgmgrFcljfrexe: .L_ghash_done_jzxBnczDBxGvzop: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_jzxBnczDBxGvzop: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_jzxBnczDBxGvzop: jmp .Lexit_gcm_encrypt .align 32 .Laes_gcm_encrypt_256_avx512: orq %r8,%r8 je .L_enc_dec_abort_ralurfzeatcGxDF xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_yhoCcfnksexDFbx movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_yhoCcfnksexDFbx subq %r13,%r12 .L_no_extra_mask_yhoCcfnksexDFbx: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 vpxorq %xmm3,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_yhoCcfnksexDFbx .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 
vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_yhoCcfnksexDFbx .L_partial_incomplete_yhoCcfnksexDFbx: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_yhoCcfnksexDFbx: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 vpshufb SHUF_MASK(%rip),%xmm3,%xmm3 vpshufb %xmm5,%xmm3,%xmm3 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_yhoCcfnksexDFbx: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_ralurfzeatcGxDF cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_ralurfzeatcGxDF vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_FolitFcvmzDtzbD vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_FolitFcvmzDtzbD .L_next_16_overflow_FolitFcvmzDtzbD: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_FolitFcvmzDtzbD: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 
192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_slhsqgEufGclFec vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_slhsqgEufGclFec: cmpq $512,%r8 jb .L_message_below_32_blocks_ralurfzeatcGxDF cmpb $240,%r15b jae .L_next_16_overflow_rpkeAoplfcmnoqe vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_rpkeAoplfcmnoqe .L_next_16_overflow_rpkeAoplfcmnoqe: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_rpkeAoplfcmnoqe: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 
.byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_wDdhvEhGipECfzn vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 
98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_wDdhvEhGipECfzn: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_ralurfzeatcGxDF .L_encrypt_big_nblocks_ralurfzeatcGxDF: cmpb $240,%r15b jae .L_16_blocks_overflow_tcpaCgCtyttEnkC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_tcpaCgCtyttEnkC .L_16_blocks_overflow_tcpaCgCtyttEnkC: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_tcpaCgCtyttEnkC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_fefwzzFqtyGgFsy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_fefwzzFqtyGgFsy .L_16_blocks_overflow_fefwzzFqtyGgFsy: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_fefwzzFqtyGgFsy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 
98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_poCaishDCqiAtDd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp 
.L_16_blocks_ok_poCaishDCqiAtDd .L_16_blocks_overflow_poCaishDCqiAtDd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_poCaishDCqiAtDd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_ralurfzeatcGxDF .L_no_more_big_nblocks_ralurfzeatcGxDF: cmpq $512,%r8 jae .L_encrypt_32_blocks_ralurfzeatcGxDF cmpq $256,%r8 jae .L_encrypt_16_blocks_ralurfzeatcGxDF .L_encrypt_0_blocks_ghash_32_ralurfzeatcGxDF: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_xdvkfswEyEirzwB cmpl $8,%r10d je .L_last_num_blocks_is_8_xdvkfswEyEirzwB jb .L_last_num_blocks_is_7_1_xdvkfswEyEirzwB cmpl $12,%r10d je .L_last_num_blocks_is_12_xdvkfswEyEirzwB jb .L_last_num_blocks_is_11_9_xdvkfswEyEirzwB cmpl $15,%r10d je .L_last_num_blocks_is_15_xdvkfswEyEirzwB ja .L_last_num_blocks_is_16_xdvkfswEyEirzwB cmpl $14,%r10d je .L_last_num_blocks_is_14_xdvkfswEyEirzwB jmp .L_last_num_blocks_is_13_xdvkfswEyEirzwB .L_last_num_blocks_is_11_9_xdvkfswEyEirzwB: cmpl $10,%r10d je .L_last_num_blocks_is_10_xdvkfswEyEirzwB ja .L_last_num_blocks_is_11_xdvkfswEyEirzwB jmp .L_last_num_blocks_is_9_xdvkfswEyEirzwB .L_last_num_blocks_is_7_1_xdvkfswEyEirzwB: cmpl $4,%r10d je .L_last_num_blocks_is_4_xdvkfswEyEirzwB jb .L_last_num_blocks_is_3_1_xdvkfswEyEirzwB cmpl $6,%r10d ja .L_last_num_blocks_is_7_xdvkfswEyEirzwB je .L_last_num_blocks_is_6_xdvkfswEyEirzwB jmp .L_last_num_blocks_is_5_xdvkfswEyEirzwB .L_last_num_blocks_is_3_1_xdvkfswEyEirzwB: 
cmpl $2,%r10d ja .L_last_num_blocks_is_3_xdvkfswEyEirzwB je .L_last_num_blocks_is_2_xdvkfswEyEirzwB .L_last_num_blocks_is_1_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_igvodhikativhxs vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_igvodhikativhxs .L_16_blocks_overflow_igvodhikativhxs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_igvodhikativhxs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_GcsipbkriaBjvfi subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GcsipbkriaBjvfi .L_small_initial_partial_block_GcsipbkriaBjvfi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_GcsipbkriaBjvfi .L_small_initial_compute_done_GcsipbkriaBjvfi: .L_after_reduction_GcsipbkriaBjvfi: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_2_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_vsprwaoekjwbkng vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_vsprwaoekjwbkng .L_16_blocks_overflow_vsprwaoekjwbkng: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_vsprwaoekjwbkng: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 
leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lhbsspkwfiDtCyr subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lhbsspkwfiDtCyr .L_small_initial_partial_block_lhbsspkwfiDtCyr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lhbsspkwfiDtCyr: orq %r8,%r8 je .L_after_reduction_lhbsspkwfiDtCyr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lhbsspkwfiDtCyr: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_3_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_pdiFfjCElAtekEv vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_pdiFfjCElAtekEv .L_16_blocks_overflow_pdiFfjCElAtekEv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_pdiFfjCElAtekEv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq 
$0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iyftGziCGvzBGwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iyftGziCGvzBGwp .L_small_initial_partial_block_iyftGziCGvzBGwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iyftGziCGvzBGwp: orq %r8,%r8 je .L_after_reduction_iyftGziCGvzBGwp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iyftGziCGvzBGwp: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_4_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_giftEyoltvfgggA vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_giftEyoltvfgggA 
.L_16_blocks_overflow_giftEyoltvfgggA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_giftEyoltvfgggA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hdzCnewjxBbishd subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hdzCnewjxBbishd .L_small_initial_partial_block_hdzCnewjxBbishd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 
98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hdzCnewjxBbishd: orq %r8,%r8 je .L_after_reduction_hdzCnewjxBbishd vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hdzCnewjxBbishd: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_5_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_orpkewzlnxCGshz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_orpkewzlnxCGshz .L_16_blocks_overflow_orpkewzlnxCGshz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_orpkewzlnxCGshz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kyaoueFfnBudEhA subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kyaoueFfnBudEhA .L_small_initial_partial_block_kyaoueFfnBudEhA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kyaoueFfnBudEhA: orq %r8,%r8 je .L_after_reduction_kyaoueFfnBudEhA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kyaoueFfnBudEhA: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_6_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_orictFjAdfigdzk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_orictFjAdfigdzk .L_16_blocks_overflow_orictFjAdfigdzk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_orictFjAdfigdzk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 
0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sgBbGfbjnccbnkh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sgBbGfbjnccbnkh .L_small_initial_partial_block_sgBbGfbjnccbnkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sgBbGfbjnccbnkh: orq %r8,%r8 je .L_after_reduction_sgBbGfbjnccbnkh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sgBbGfbjnccbnkh: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_7_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_ivtabDnDqnrGEcy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_ivtabDnDqnrGEcy .L_16_blocks_overflow_ivtabDnDqnrGEcy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_ivtabDnDqnrGEcy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FwberbenvBxEcDE subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FwberbenvBxEcDE .L_small_initial_partial_block_FwberbenvBxEcDE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FwberbenvBxEcDE: orq %r8,%r8 je .L_after_reduction_FwberbenvBxEcDE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FwberbenvBxEcDE: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_8_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_uBiojDdgtEoAfGd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_uBiojDdgtEoAfGd .L_16_blocks_overflow_uBiojDdgtEoAfGd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_uBiojDdgtEoAfGd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb 
%zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_twDrbrvhowngEDr subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_twDrbrvhowngEDr .L_small_initial_partial_block_twDrbrvhowngEDr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_twDrbrvhowngEDr: orq %r8,%r8 je .L_after_reduction_twDrbrvhowngEDr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_twDrbrvhowngEDr: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_9_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_FqperxgfhBwCqDo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_FqperxgfhBwCqDo .L_16_blocks_overflow_FqperxgfhBwCqDo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_FqperxgfhBwCqDo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 
vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_agoyuAkiGwzDjns subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 
98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_agoyuAkiGwzDjns .L_small_initial_partial_block_agoyuAkiGwzDjns: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_agoyuAkiGwzDjns: orq %r8,%r8 je .L_after_reduction_agoyuAkiGwzDjns vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_agoyuAkiGwzDjns: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_10_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_bvimoanuboioxom vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_bvimoanuboioxom .L_16_blocks_overflow_bvimoanuboioxom: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_bvimoanuboioxom: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 
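// The .byte sequences beginning with 98 (0x62) appear to be hand-encoded EVEX
// instructions: 220/221 (0xdc/0xdd) look like vaesenc/vaesenclast on ZMM
// registers and 68 (0x44) like vpclmulqdq, presumably emitted as raw bytes for
// compatibility with assemblers lacking VAES/VPCLMULQDQ support. In these
// handlers the AES rounds on the fresh counter blocks are interleaved with
// GHASH multiplies of what appears to be previously byte-reflected ciphertext
// saved at (%rsp,%rbx,1) against hash-key powers cached at 1024(%rsp) and up.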
vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kblafGutCsvisjA subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kblafGutCsvisjA .L_small_initial_partial_block_kblafGutCsvisjA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kblafGutCsvisjA: orq %r8,%r8 je .L_after_reduction_kblafGutCsvisjA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kblafGutCsvisjA: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_11_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_DcdigDqdkAmpala vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_DcdigDqdkAmpala .L_16_blocks_overflow_DcdigDqdkAmpala: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_DcdigDqdkAmpala: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 
98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lFojEFrDvGhrqGC subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lFojEFrDvGhrqGC .L_small_initial_partial_block_lFojEFrDvGhrqGC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lFojEFrDvGhrqGC: orq %r8,%r8 je .L_after_reduction_lFojEFrDvGhrqGC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lFojEFrDvGhrqGC: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_12_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_ijmafkyicqbAgov vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_ijmafkyicqbAgov .L_16_blocks_overflow_ijmafkyicqbAgov: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_ijmafkyicqbAgov: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 
vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lbhyvEuvxtzgCqA subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lbhyvEuvxtzgCqA .L_small_initial_partial_block_lbhyvEuvxtzgCqA: movl %r8d,(%rdx) 
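// Partial-block path: the remaining byte count (%r8d) has just been stored to
// (%rdx), and the last masked ciphertext block in %xmm11 is saved to 16(%rsi)
// below, presumably so the unfinished GHASH block can be completed on a later
// call.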
vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lbhyvEuvxtzgCqA: orq %r8,%r8 je .L_after_reduction_lbhyvEuvxtzgCqA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lbhyvEuvxtzgCqA: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_13_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_xewjdgAADiucjCd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_xewjdgAADiucjCd .L_16_blocks_overflow_xewjdgAADiucjCd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_xewjdgAADiucjCd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 
vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ihgbCttclcmDtmF subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 
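// Horizontal fold of the 512-bit GHASH accumulators down to 128 bits; the
// vpclmulqdq operations against POLY2(%rip) that follow appear to perform the
// usual two-step carry-less reduction modulo the GHASH polynomial
// x^128 + x^7 + x^2 + x + 1.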
vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ihgbCttclcmDtmF .L_small_initial_partial_block_ihgbCttclcmDtmF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ihgbCttclcmDtmF: orq %r8,%r8 je .L_after_reduction_ihgbCttclcmDtmF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ihgbCttclcmDtmF: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_14_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_uxvkthhndspgdct vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_uxvkthhndspgdct .L_16_blocks_overflow_uxvkthhndspgdct: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_uxvkthhndspgdct: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 
192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_okkABmocyzkldgz subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 
98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_okkABmocyzkldgz .L_small_initial_partial_block_okkABmocyzkldgz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_okkABmocyzkldgz: orq %r8,%r8 je .L_after_reduction_okkABmocyzkldgz vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_okkABmocyzkldgz: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_15_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_fdeajBtuhuyobdz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_fdeajBtuhuyobdz .L_16_blocks_overflow_fdeajBtuhuyobdz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_fdeajBtuhuyobdz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 
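// The round keys are broadcast from 0(%rdi) through 224(%rdi) in 16-byte
// steps, i.e. fifteen keys ending in an aesenclast, which is consistent with
// an AES-256 key schedule being applied to each 128-bit counter lane.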
vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb 
%zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ltEnnExvFfBwyxa subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ltEnnExvFfBwyxa .L_small_initial_partial_block_ltEnnExvFfBwyxa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ltEnnExvFfBwyxa: orq %r8,%r8 je .L_after_reduction_ltEnnExvFfBwyxa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ltEnnExvFfBwyxa: jmp .L_last_blocks_done_xdvkfswEyEirzwB 
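/* .L_last_num_blocks_is_16: handles a tail of exactly 16 blocks. Counter
   blocks are built with vpaddd (falling back to the _overflow path, which
   byte-swaps and uses the ddq_add_* constants, when the low counter byte
   would wrap), the AES rounds run with keys broadcast from (%rdi), the final
   partial block is masked via %k1 loaded from byte64_len_to_mask_table, and
   the byte-swapped ciphertext is then folded into the GHASH accumulator and
   reduced with the POLY2 constant. */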
.L_last_num_blocks_is_16_xdvkfswEyEirzwB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_mxnyyrjuxpBhloh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_mxnyyrjuxpBhloh .L_16_blocks_overflow_mxnyyrjuxpBhloh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_mxnyyrjuxpBhloh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_CFywctAlrBmkufB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CFywctAlrBmkufB: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CFywctAlrBmkufB: jmp .L_last_blocks_done_xdvkfswEyEirzwB .L_last_num_blocks_is_0_xdvkfswEyEirzwB: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 
98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_xdvkfswEyEirzwB: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_ralurfzeatcGxDF .L_encrypt_32_blocks_ralurfzeatcGxDF: cmpb $240,%r15b jae .L_16_blocks_overflow_maxnEmGesnybyGw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_maxnEmGesnybyGw .L_16_blocks_overflow_maxnEmGesnybyGw: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_maxnEmGesnybyGw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq 
%zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_caDkotybClbwqcs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_caDkotybClbwqcs .L_16_blocks_overflow_caDkotybClbwqcs: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_caDkotybClbwqcs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq 
%ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CpuqkplkrGqAlEE cmpl $8,%r10d je .L_last_num_blocks_is_8_CpuqkplkrGqAlEE jb .L_last_num_blocks_is_7_1_CpuqkplkrGqAlEE cmpl $12,%r10d je .L_last_num_blocks_is_12_CpuqkplkrGqAlEE jb .L_last_num_blocks_is_11_9_CpuqkplkrGqAlEE cmpl $15,%r10d je .L_last_num_blocks_is_15_CpuqkplkrGqAlEE ja .L_last_num_blocks_is_16_CpuqkplkrGqAlEE cmpl $14,%r10d je .L_last_num_blocks_is_14_CpuqkplkrGqAlEE jmp .L_last_num_blocks_is_13_CpuqkplkrGqAlEE .L_last_num_blocks_is_11_9_CpuqkplkrGqAlEE: cmpl $10,%r10d je .L_last_num_blocks_is_10_CpuqkplkrGqAlEE ja .L_last_num_blocks_is_11_CpuqkplkrGqAlEE jmp .L_last_num_blocks_is_9_CpuqkplkrGqAlEE .L_last_num_blocks_is_7_1_CpuqkplkrGqAlEE: cmpl $4,%r10d je .L_last_num_blocks_is_4_CpuqkplkrGqAlEE jb .L_last_num_blocks_is_3_1_CpuqkplkrGqAlEE cmpl $6,%r10d ja .L_last_num_blocks_is_7_CpuqkplkrGqAlEE je .L_last_num_blocks_is_6_CpuqkplkrGqAlEE jmp .L_last_num_blocks_is_5_CpuqkplkrGqAlEE .L_last_num_blocks_is_3_1_CpuqkplkrGqAlEE: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CpuqkplkrGqAlEE je .L_last_num_blocks_is_2_CpuqkplkrGqAlEE .L_last_num_blocks_is_1_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_alDzwCfDlrwfuue vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_alDzwCfDlrwfuue .L_16_blocks_overflow_alDzwCfDlrwfuue: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_alDzwCfDlrwfuue: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 
98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_iBlFpkcubprtgpj subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iBlFpkcubprtgpj .L_small_initial_partial_block_iBlFpkcubprtgpj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_iBlFpkcubprtgpj .L_small_initial_compute_done_iBlFpkcubprtgpj: .L_after_reduction_iBlFpkcubprtgpj: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_2_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_nCqcfaumojsjgbp vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_nCqcfaumojsjgbp .L_16_blocks_overflow_nCqcfaumojsjgbp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_nCqcfaumojsjgbp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yceinkEjzFdqAeG subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yceinkEjzFdqAeG .L_small_initial_partial_block_yceinkEjzFdqAeG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yceinkEjzFdqAeG: orq %r8,%r8 je .L_after_reduction_yceinkEjzFdqAeG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yceinkEjzFdqAeG: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_3_CpuqkplkrGqAlEE: leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_uwpbmorybawstbl vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_uwpbmorybawstbl .L_16_blocks_overflow_uwpbmorybawstbl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_uwpbmorybawstbl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sCduuopFvdCBjgG subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sCduuopFvdCBjgG 
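/* .L_small_initial_partial_block_*: taken when the trailing block is not a
   full 16 bytes. It appears to record the leftover byte count at (%rdx) and
   stash the last ciphertext block at 16(%rsi) so it can be hashed once its
   final length is known, then performs the same vpclmulqdq / POLY2 GHASH
   reduction as the full-block case, selecting hash-key powers relative to
   80(%rsi). */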
.L_small_initial_partial_block_sCduuopFvdCBjgG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sCduuopFvdCBjgG: orq %r8,%r8 je .L_after_reduction_sCduuopFvdCBjgG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sCduuopFvdCBjgG: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_4_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_vadkquwycFnaotd vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_vadkquwycFnaotd .L_16_blocks_overflow_vadkquwycFnaotd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_vadkquwycFnaotd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 
vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ivhaorpFqBawvwj subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ivhaorpFqBawvwj .L_small_initial_partial_block_ivhaorpFqBawvwj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ivhaorpFqBawvwj: orq %r8,%r8 je .L_after_reduction_ivhaorpFqBawvwj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ivhaorpFqBawvwj: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_5_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_aFkFaFcofvloukl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_aFkFaFcofvloukl .L_16_blocks_overflow_aFkFaFcofvloukl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_aFkFaFcofvloukl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 
960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DnveyAaCeDgzdCr subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DnveyAaCeDgzdCr .L_small_initial_partial_block_DnveyAaCeDgzdCr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq 
%zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DnveyAaCeDgzdCr: orq %r8,%r8 je .L_after_reduction_DnveyAaCeDgzdCr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DnveyAaCeDgzdCr: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_6_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_hyGBuzayqDhhsut vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_hyGBuzayqDhhsut .L_16_blocks_overflow_hyGBuzayqDhhsut: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_hyGBuzayqDhhsut: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 
.byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FildbillAFDaont subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FildbillAFDaont .L_small_initial_partial_block_FildbillAFDaont: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FildbillAFDaont: orq %r8,%r8 je .L_after_reduction_FildbillAFDaont vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FildbillAFDaont: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_7_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_wfwrxhyCBsGqfaa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_wfwrxhyCBsGqfaa .L_16_blocks_overflow_wfwrxhyCBsGqfaa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_wfwrxhyCBsGqfaa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 
vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dFDhkscmwibqAtn subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dFDhkscmwibqAtn .L_small_initial_partial_block_dFDhkscmwibqAtn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dFDhkscmwibqAtn: orq %r8,%r8 je .L_after_reduction_dFDhkscmwibqAtn vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dFDhkscmwibqAtn: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_8_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_nwCspduhyDCpabc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_nwCspduhyDCpabc .L_16_blocks_overflow_nwCspduhyDCpabc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_nwCspduhyDCpabc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_atEEroEqtkbEDxn subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_atEEroEqtkbEDxn .L_small_initial_partial_block_atEEroEqtkbEDxn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_atEEroEqtkbEDxn: orq %r8,%r8 je .L_after_reduction_atEEroEqtkbEDxn vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_atEEroEqtkbEDxn: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_9_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_FtfeaayDywckyfd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_FtfeaayDywckyfd .L_16_blocks_overflow_FtfeaayDywckyfd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_FtfeaayDywckyfd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nvbkpkefGjFjFfs subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nvbkpkefGjFjFfs .L_small_initial_partial_block_nvbkpkefGjFjFfs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nvbkpkefGjFjFfs: orq %r8,%r8 je .L_after_reduction_nvbkpkefGjFjFfs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nvbkpkefGjFjFfs: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_10_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_rwkpzgCdusgbwpC vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_rwkpzgCdusgbwpC .L_16_blocks_overflow_rwkpzgCdusgbwpC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_rwkpzgCdusgbwpC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb 
%ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tEmDckpEuqBsraf subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tEmDckpEuqBsraf .L_small_initial_partial_block_tEmDckpEuqBsraf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tEmDckpEuqBsraf: orq %r8,%r8 je .L_after_reduction_tEmDckpEuqBsraf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tEmDckpEuqBsraf: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_11_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_lwGByppsljaznxt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_lwGByppsljaznxt .L_16_blocks_overflow_lwGByppsljaznxt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_lwGByppsljaznxt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ilixxtsukzdoAtA subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 
98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ilixxtsukzdoAtA .L_small_initial_partial_block_ilixxtsukzdoAtA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ilixxtsukzdoAtA: orq %r8,%r8 je .L_after_reduction_ilixxtsukzdoAtA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ilixxtsukzdoAtA: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_12_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_jbqznyehrlCBlqk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_jbqznyehrlCBlqk .L_16_blocks_overflow_jbqznyehrlCBlqk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_jbqznyehrlCBlqk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 
$0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wctpdEkyEmpBhlB subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 
98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wctpdEkyEmpBhlB .L_small_initial_partial_block_wctpdEkyEmpBhlB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wctpdEkyEmpBhlB: orq %r8,%r8 je .L_after_reduction_wctpdEkyEmpBhlB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wctpdEkyEmpBhlB: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_13_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_zfoiakgFjhncFgz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_zfoiakgFjhncFgz .L_16_blocks_overflow_zfoiakgFjhncFgz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_zfoiakgFjhncFgz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ECBllyApvBoFquD 
subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ECBllyApvBoFquD .L_small_initial_partial_block_ECBllyApvBoFquD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ECBllyApvBoFquD: orq %r8,%r8 je .L_after_reduction_ECBllyApvBoFquD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ECBllyApvBoFquD: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_14_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_boaouDrBeEmAnwp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_boaouDrBeEmAnwp .L_16_blocks_overflow_boaouDrBeEmAnwp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 
vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_boaouDrBeEmAnwp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq 
%zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CjBwxsGswEoCtpA subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CjBwxsGswEoCtpA .L_small_initial_partial_block_CjBwxsGswEoCtpA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
.L_small_initial_compute_done_CjBwxsGswEoCtpA: orq %r8,%r8 je .L_after_reduction_CjBwxsGswEoCtpA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CjBwxsGswEoCtpA: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_15_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_mFdcfdxbaoeAcmw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_mFdcfdxbaoeAcmw .L_16_blocks_overflow_mFdcfdxbaoeAcmw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_mFdcfdxbaoeAcmw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nkpoxiswyhgqlsf subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nkpoxiswyhgqlsf .L_small_initial_partial_block_nkpoxiswyhgqlsf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 
98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nkpoxiswyhgqlsf: orq %r8,%r8 je .L_after_reduction_nkpoxiswyhgqlsf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nkpoxiswyhgqlsf: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_16_CpuqkplkrGqAlEE: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_dhDlEwplftmrFtf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_dhDlEwplftmrFtf .L_16_blocks_overflow_dhDlEwplftmrFtf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_dhDlEwplftmrFtf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_iuDhkykBcvvzBFb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iuDhkykBcvvzBFb: vpxorq %xmm7,%xmm14,%xmm14 
.L_after_reduction_iuDhkykBcvvzBFb: jmp .L_last_blocks_done_CpuqkplkrGqAlEE .L_last_num_blocks_is_0_CpuqkplkrGqAlEE: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CpuqkplkrGqAlEE: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_ralurfzeatcGxDF .L_encrypt_16_blocks_ralurfzeatcGxDF: cmpb $240,%r15b jae .L_16_blocks_overflow_FGbkcFatDxoofCE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_FGbkcFatDxoofCE .L_16_blocks_overflow_FGbkcFatDxoofCE: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_FGbkcFatDxoofCE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d 
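// (remaining length + 15) >> 4 rounds the byte count in %r8 up to whole 16-byte
// blocks; the comparison ladder below then dispatches to the matching
// .L_last_num_blocks_is_N tail handler.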
shrl $4,%r10d je .L_last_num_blocks_is_0_FesvdmtDyerGEdv cmpl $8,%r10d je .L_last_num_blocks_is_8_FesvdmtDyerGEdv jb .L_last_num_blocks_is_7_1_FesvdmtDyerGEdv cmpl $12,%r10d je .L_last_num_blocks_is_12_FesvdmtDyerGEdv jb .L_last_num_blocks_is_11_9_FesvdmtDyerGEdv cmpl $15,%r10d je .L_last_num_blocks_is_15_FesvdmtDyerGEdv ja .L_last_num_blocks_is_16_FesvdmtDyerGEdv cmpl $14,%r10d je .L_last_num_blocks_is_14_FesvdmtDyerGEdv jmp .L_last_num_blocks_is_13_FesvdmtDyerGEdv .L_last_num_blocks_is_11_9_FesvdmtDyerGEdv: cmpl $10,%r10d je .L_last_num_blocks_is_10_FesvdmtDyerGEdv ja .L_last_num_blocks_is_11_FesvdmtDyerGEdv jmp .L_last_num_blocks_is_9_FesvdmtDyerGEdv .L_last_num_blocks_is_7_1_FesvdmtDyerGEdv: cmpl $4,%r10d je .L_last_num_blocks_is_4_FesvdmtDyerGEdv jb .L_last_num_blocks_is_3_1_FesvdmtDyerGEdv cmpl $6,%r10d ja .L_last_num_blocks_is_7_FesvdmtDyerGEdv je .L_last_num_blocks_is_6_FesvdmtDyerGEdv jmp .L_last_num_blocks_is_5_FesvdmtDyerGEdv .L_last_num_blocks_is_3_1_FesvdmtDyerGEdv: cmpl $2,%r10d ja .L_last_num_blocks_is_3_FesvdmtDyerGEdv je .L_last_num_blocks_is_2_FesvdmtDyerGEdv .L_last_num_blocks_is_1_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_cmjbanhfxFrrojy vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_cmjbanhfxFrrojy .L_16_blocks_overflow_cmjbanhfxFrrojy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_cmjbanhfxFrrojy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 
vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_EGeAwrlgtsiFljf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EGeAwrlgtsiFljf .L_small_initial_partial_block_EGeAwrlgtsiFljf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_EGeAwrlgtsiFljf .L_small_initial_compute_done_EGeAwrlgtsiFljf: .L_after_reduction_EGeAwrlgtsiFljf: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_2_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_EgjyoropybwcGcn vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_EgjyoropybwcGcn .L_16_blocks_overflow_EgjyoropybwcGcn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_EgjyoropybwcGcn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rrppsiDyiwwbqbf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rrppsiDyiwwbqbf .L_small_initial_partial_block_rrppsiDyiwwbqbf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rrppsiDyiwwbqbf: orq %r8,%r8 je .L_after_reduction_rrppsiDyiwwbqbf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rrppsiDyiwwbqbf: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_3_FesvdmtDyerGEdv: leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_wGGmGvscmpGfnny vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_wGGmGvscmpGfnny .L_16_blocks_overflow_wGGmGvscmpGfnny: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_wGGmGvscmpGfnny: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pFvDrkCwqwAamnn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pFvDrkCwqwAamnn .L_small_initial_partial_block_pFvDrkCwqwAamnn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pFvDrkCwqwAamnn: orq %r8,%r8 je .L_after_reduction_pFvDrkCwqwAamnn vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pFvDrkCwqwAamnn: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_4_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_lnowafuogaacgct vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_lnowafuogaacgct .L_16_blocks_overflow_lnowafuogaacgct: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_lnowafuogaacgct: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq 
$0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yqgqaEocfqiFkDi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yqgqaEocfqiFkDi .L_small_initial_partial_block_yqgqaEocfqiFkDi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yqgqaEocfqiFkDi: orq %r8,%r8 je .L_after_reduction_yqgqaEocfqiFkDi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yqgqaEocfqiFkDi: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_5_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_trmgpGgtzmsExiu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_trmgpGgtzmsExiu 
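// Overflow path: the cmpl/jae guard above appears to detect that the low byte of the
// stored counter would wrap while producing the next blocks, so the counters are
// byte-reversed with vpshufb %zmm29, incremented as 32-bit lanes, and reversed back.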
.L_16_blocks_overflow_trmgpGgtzmsExiu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_trmgpGgtzmsExiu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * 
(5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vuyopzdEphdnacq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vuyopzdEphdnacq .L_small_initial_partial_block_vuyopzdEphdnacq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vuyopzdEphdnacq: orq %r8,%r8 je .L_after_reduction_vuyopzdEphdnacq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vuyopzdEphdnacq: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_6_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_FwaeBcDAewBtpAB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_FwaeBcDAewBtpAB .L_16_blocks_overflow_FwaeBcDAewBtpAB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_FwaeBcDAewBtpAB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 
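// Throughout these blocks the AES rounds (round keys broadcast from the schedule at
// (%rdi)) are interleaved with vpclmulqdq GHASH multiplies over previously buffered
// data, which likely serves to hide the latency of both instruction chains.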
vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rdtAwwiDCCqmaAa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rdtAwwiDCCqmaAa .L_small_initial_partial_block_rdtAwwiDCCqmaAa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rdtAwwiDCCqmaAa: orq %r8,%r8 je .L_after_reduction_rdtAwwiDCCqmaAa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rdtAwwiDCCqmaAa: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_7_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_AnyscuqxAspkzsl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_AnyscuqxAspkzsl .L_16_blocks_overflow_AnyscuqxAspkzsl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_AnyscuqxAspkzsl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_digiiCypcjzldxx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_digiiCypcjzldxx .L_small_initial_partial_block_digiiCypcjzldxx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_digiiCypcjzldxx: orq %r8,%r8 je .L_after_reduction_digiiCypcjzldxx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_digiiCypcjzldxx: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_8_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_cgqpkbbBmprdEnv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_cgqpkbbBmprdEnv .L_16_blocks_overflow_cgqpkbbBmprdEnv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_cgqpkbbBmprdEnv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 
vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sEakaptGjtmocyA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sEakaptGjtmocyA .L_small_initial_partial_block_sEakaptGjtmocyA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sEakaptGjtmocyA: orq %r8,%r8 je .L_after_reduction_sEakaptGjtmocyA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sEakaptGjtmocyA: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_9_FesvdmtDyerGEdv: leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_ovcajrDEfpdjwcF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_ovcajrDEfpdjwcF .L_16_blocks_overflow_ovcajrDEfpdjwcF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_ovcajrDEfpdjwcF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 
vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wwoArvEqahCsDin subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wwoArvEqahCsDin .L_small_initial_partial_block_wwoArvEqahCsDin: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wwoArvEqahCsDin: orq %r8,%r8 je .L_after_reduction_wwoArvEqahCsDin vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wwoArvEqahCsDin: jmp 
.L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_10_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_xyisBwjDghCtkcq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_xyisBwjDghCtkcq .L_16_blocks_overflow_xyisBwjDghCtkcq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_xyisBwjDghCtkcq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 
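/* The .byte 98,... groups used throughout these handlers are manually
   encoded EVEX instructions (0x62 = 98 prefix): opcode byte 220/221 is
   vaesenc/vaesenclast and 68 is vpclmulqdq.  They are presumably emitted as
   raw bytes so the file still builds with assemblers that lack AVX512/VAES
   mnemonic support. */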
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_maGzmchmgBAsGGp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_maGzmchmgBAsGGp .L_small_initial_partial_block_maGzmchmgBAsGGp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_maGzmchmgBAsGGp: orq %r8,%r8 je .L_after_reduction_maGzmchmgBAsGGp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_maGzmchmgBAsGGp: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_11_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_oCaueqhtnkiqikA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_oCaueqhtnkiqikA .L_16_blocks_overflow_oCaueqhtnkiqikA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_oCaueqhtnkiqikA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq 
%ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rwuhidithmAtnfF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rwuhidithmAtnfF .L_small_initial_partial_block_rwuhidithmAtnfF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq 
%zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rwuhidithmAtnfF: orq %r8,%r8 je .L_after_reduction_rwuhidithmAtnfF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rwuhidithmAtnfF: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_12_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_xwjsvxAnBhmckaz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_xwjsvxAnBhmckaz .L_16_blocks_overflow_xwjsvxAnBhmckaz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_xwjsvxAnBhmckaz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_glqGCCyiublvFga subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_glqGCCyiublvFga .L_small_initial_partial_block_glqGCCyiublvFga: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 
98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_glqGCCyiublvFga: orq %r8,%r8 je .L_after_reduction_glqGCCyiublvFga vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_glqGCCyiublvFga: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_13_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_jfgktdduAaBgqFv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_jfgktdduAaBgqFv .L_16_blocks_overflow_jfgktdduAaBgqFv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_jfgktdduAaBgqFv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_deedxboGavqljAa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
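/* GHASH folding for the 13-block tail: the byte-reflected data blocks are
   multiplied (vpclmulqdq low, high and cross products) against what appears
   to be a table of precomputed hash-key powers reached through 80(%rsi),
   and the partial products are combined below into a single 128-bit value
   ahead of the polynomial reduction. */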
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_deedxboGavqljAa .L_small_initial_partial_block_deedxboGavqljAa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_deedxboGavqljAa: orq %r8,%r8 je .L_after_reduction_deedxboGavqljAa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_deedxboGavqljAa: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_14_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_xdtrxodfgwcifbm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_xdtrxodfgwcifbm .L_16_blocks_overflow_xdtrxodfgwcifbm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_xdtrxodfgwcifbm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_lhfnbffaAGncxjA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lhfnbffaAGncxjA .L_small_initial_partial_block_lhfnbffaAGncxjA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lhfnbffaAGncxjA: orq %r8,%r8 je .L_after_reduction_lhfnbffaAGncxjA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lhfnbffaAGncxjA: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_15_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_FrBtEqtdGyajfFu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd 
%zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_FrBtEqtdGyajfFu .L_16_blocks_overflow_FrBtEqtdGyajfFu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_FrBtEqtdGyajfFu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 
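/* Remaining rounds and write-back for the 15-block tail: after vaesenclast
   the keystream is XORed with the loaded input, the final (possibly
   partial) 64-byte group is stored through mask %k1, which was loaded from
   byte64_len_to_mask_table using the remaining byte count in %r8, and the
   byte-reflected result is accumulated into GHASH. */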
.byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DlintgAmylyraad subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DlintgAmylyraad .L_small_initial_partial_block_DlintgAmylyraad: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq 
$0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DlintgAmylyraad: orq %r8,%r8 je .L_after_reduction_DlintgAmylyraad vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DlintgAmylyraad: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_16_FesvdmtDyerGEdv: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_ofhxurlakbuiiab vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ofhxurlakbuiiab .L_16_blocks_overflow_ofhxurlakbuiiab: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ofhxurlakbuiiab: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 
112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_niAfluBnEgrukbj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 
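/* Final GHASH reduction for the 16-block case: the accumulated wide product
   is folded down to 128 bits and reduced with the POLY2 constant (two
   vpclmulqdq folds), leaving the running tag state in %xmm14.  The
   blocks-is-0 and message-below-32-blocks paths that follow reuse the same
   reduction and, in the latter, appear to refresh precomputed hash-key
   powers kept on the stack. */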
vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_niAfluBnEgrukbj: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_niAfluBnEgrukbj: jmp .L_last_blocks_done_FesvdmtDyerGEdv .L_last_num_blocks_is_0_FesvdmtDyerGEdv: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_FesvdmtDyerGEdv: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_ralurfzeatcGxDF .L_message_below_32_blocks_ralurfzeatcGxDF: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_BiAvfDwrflaDzBx vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq 
%zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_BiAvfDwrflaDzBx: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_pnzuldcucuyingq cmpl $8,%r10d je .L_last_num_blocks_is_8_pnzuldcucuyingq jb .L_last_num_blocks_is_7_1_pnzuldcucuyingq cmpl $12,%r10d je .L_last_num_blocks_is_12_pnzuldcucuyingq jb .L_last_num_blocks_is_11_9_pnzuldcucuyingq cmpl $15,%r10d je .L_last_num_blocks_is_15_pnzuldcucuyingq ja .L_last_num_blocks_is_16_pnzuldcucuyingq cmpl $14,%r10d je .L_last_num_blocks_is_14_pnzuldcucuyingq jmp .L_last_num_blocks_is_13_pnzuldcucuyingq .L_last_num_blocks_is_11_9_pnzuldcucuyingq: cmpl $10,%r10d je .L_last_num_blocks_is_10_pnzuldcucuyingq ja .L_last_num_blocks_is_11_pnzuldcucuyingq jmp .L_last_num_blocks_is_9_pnzuldcucuyingq .L_last_num_blocks_is_7_1_pnzuldcucuyingq: cmpl $4,%r10d je .L_last_num_blocks_is_4_pnzuldcucuyingq jb .L_last_num_blocks_is_3_1_pnzuldcucuyingq cmpl $6,%r10d ja .L_last_num_blocks_is_7_pnzuldcucuyingq je .L_last_num_blocks_is_6_pnzuldcucuyingq jmp .L_last_num_blocks_is_5_pnzuldcucuyingq .L_last_num_blocks_is_3_1_pnzuldcucuyingq: cmpl $2,%r10d ja .L_last_num_blocks_is_3_pnzuldcucuyingq je .L_last_num_blocks_is_2_pnzuldcucuyingq .L_last_num_blocks_is_1_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_zDsbocmrpEvnicC vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_zDsbocmrpEvnicC .L_16_blocks_overflow_zDsbocmrpEvnicC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_zDsbocmrpEvnicC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 
98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_GkBxoqrqufclksk subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GkBxoqrqufclksk .L_small_initial_partial_block_GkBxoqrqufclksk: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_GkBxoqrqufclksk .L_small_initial_compute_done_GkBxoqrqufclksk: .L_after_reduction_GkBxoqrqufclksk: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_2_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_fkqtFBuohiwoapu vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_fkqtFBuohiwoapu .L_16_blocks_overflow_fkqtFBuohiwoapu: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_fkqtFBuohiwoapu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vxviotokbwbgyEt subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vxviotokbwbgyEt .L_small_initial_partial_block_vxviotokbwbgyEt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq 
%zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vxviotokbwbgyEt: orq %r8,%r8 je .L_after_reduction_vxviotokbwbgyEt vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vxviotokbwbgyEt: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_3_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_myfxreEhmAEiFvd vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_myfxreEhmAEiFvd .L_16_blocks_overflow_myfxreEhmAEiFvd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_myfxreEhmAEiFvd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cvvlAqBdybFdjiy subq $16,%r8 movl $0,(%rdx) vmovdqu64 
208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cvvlAqBdybFdjiy .L_small_initial_partial_block_cvvlAqBdybFdjiy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cvvlAqBdybFdjiy: orq %r8,%r8 je .L_after_reduction_cvvlAqBdybFdjiy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cvvlAqBdybFdjiy: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_4_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_EshcbGrbbBjGmFs vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_EshcbGrbbBjGmFs .L_16_blocks_overflow_EshcbGrbbBjGmFs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_EshcbGrbbBjGmFs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 
96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GfeakfatCkpGtjm subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GfeakfatCkpGtjm .L_small_initial_partial_block_GfeakfatCkpGtjm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GfeakfatCkpGtjm: orq %r8,%r8 je .L_after_reduction_GfeakfatCkpGtjm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GfeakfatCkpGtjm: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_5_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_rBzncCcAACDmBwu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_rBzncCcAACDmBwu .L_16_blocks_overflow_rBzncCcAACDmBwu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 
vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_rBzncCcAACDmBwu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %xmm29,%xmm3,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AstwCzCrFBsuGAb subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AstwCzCrFBsuGAb .L_small_initial_partial_block_AstwCzCrFBsuGAb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AstwCzCrFBsuGAb: orq %r8,%r8 je .L_after_reduction_AstwCzCrFBsuGAb vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AstwCzCrFBsuGAb: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_6_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_yghnlDweoeGyiyD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_yghnlDweoeGyiyD .L_16_blocks_overflow_yghnlDweoeGyiyD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_yghnlDweoeGyiyD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 
vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %ymm29,%ymm3,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nAqArzgnghAposf subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nAqArzgnghAposf .L_small_initial_partial_block_nAqArzgnghAposf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nAqArzgnghAposf: orq %r8,%r8 je .L_after_reduction_nAqArzgnghAposf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nAqArzgnghAposf: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_7_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_stoalvbzsyrkrBC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_stoalvbzsyrkrBC .L_16_blocks_overflow_stoalvbzsyrkrBC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_stoalvbzsyrkrBC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_tvAfmkadqFgykwd subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tvAfmkadqFgykwd .L_small_initial_partial_block_tvAfmkadqFgykwd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tvAfmkadqFgykwd: orq %r8,%r8 je .L_after_reduction_tvAfmkadqFgykwd vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tvAfmkadqFgykwd: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_8_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_miFDzcCBFGrssiv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_miFDzcCBFGrssiv .L_16_blocks_overflow_miFDzcCBFGrssiv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_miFDzcCBFGrssiv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dnvdvgGCEkvixhc subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dnvdvgGCEkvixhc .L_small_initial_partial_block_dnvdvgGCEkvixhc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 
98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dnvdvgGCEkvixhc: orq %r8,%r8 je .L_after_reduction_dnvdvgGCEkvixhc vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dnvdvgGCEkvixhc: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_9_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_lkCdskAdsidpkuw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_lkCdskAdsidpkuw .L_16_blocks_overflow_lkCdskAdsidpkuw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_lkCdskAdsidpkuw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %xmm29,%xmm4,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BvuayrqCbqotfzl subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BvuayrqCbqotfzl .L_small_initial_partial_block_BvuayrqCbqotfzl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq 
$8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BvuayrqCbqotfzl: orq %r8,%r8 je .L_after_reduction_BvuayrqCbqotfzl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BvuayrqCbqotfzl: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_10_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_hktAeBlvDcCnios vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_hktAeBlvDcCnios .L_16_blocks_overflow_hktAeBlvDcCnios: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_hktAeBlvDcCnios: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %ymm29,%ymm4,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qDkapAwwDbttzcj subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qDkapAwwDbttzcj .L_small_initial_partial_block_qDkapAwwDbttzcj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qDkapAwwDbttzcj: orq %r8,%r8 je .L_after_reduction_qDkapAwwDbttzcj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qDkapAwwDbttzcj: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_11_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_bblFcfwEgdzswCm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_bblFcfwEgdzswCm .L_16_blocks_overflow_bblFcfwEgdzswCm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_bblFcfwEgdzswCm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hxkcdbddneddmzb subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hxkcdbddneddmzb .L_small_initial_partial_block_hxkcdbddneddmzb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hxkcdbddneddmzb: orq %r8,%r8 je .L_after_reduction_hxkcdbddneddmzb vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hxkcdbddneddmzb: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_12_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_qmmgmehghErCGvF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_qmmgmehghErCGvF .L_16_blocks_overflow_qmmgmehghErCGvF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_qmmgmehghErCGvF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bGACCFiDoxkcuwq subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bGACCFiDoxkcuwq .L_small_initial_partial_block_bGACCFiDoxkcuwq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bGACCFiDoxkcuwq: orq %r8,%r8 je .L_after_reduction_bGACCFiDoxkcuwq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bGACCFiDoxkcuwq: jmp .L_last_blocks_done_pnzuldcucuyingq 
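# Tail stubs for 13-16 remaining blocks follow (the zero-remainder stub further
# down only folds the already-buffered blocks into GHASH). Each stub derives
# the outstanding counter blocks, taking the byte-swapped path when the low
# counter byte would wrap, runs them through the AES round keys at (%rdi),
# XORs the keystream against the remaining input using masked loads/stores for
# the partial final block, then byte-swaps the results and folds them into the
# GHASH accumulator ahead of the POLY2 reduction.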
.L_last_num_blocks_is_13_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_dulzkutdgjakGvB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_dulzkutdgjakGvB .L_16_blocks_overflow_dulzkutdgjakGvB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_dulzkutdgjakGvB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %xmm29,%xmm5,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dmbcxBEdtigsClF subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dmbcxBEdtigsClF .L_small_initial_partial_block_dmbcxBEdtigsClF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 
vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dmbcxBEdtigsClF: orq %r8,%r8 je .L_after_reduction_dmbcxBEdtigsClF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dmbcxBEdtigsClF: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_14_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_nntbrGkellunBas vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_nntbrGkellunBas .L_16_blocks_overflow_nntbrGkellunBas: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_nntbrGkellunBas: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %ymm29,%ymm5,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aopDguzqabquECi subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aopDguzqabquECi .L_small_initial_partial_block_aopDguzqabquECi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 
98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aopDguzqabquECi: orq %r8,%r8 je .L_after_reduction_aopDguzqabquECi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aopDguzqabquECi: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_15_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_gqGDtzmCceFkfal vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_gqGDtzmCceFkfal .L_16_blocks_overflow_gqGDtzmCceFkfal: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_gqGDtzmCceFkfal: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kkvugeyiFsBldFy subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kkvugeyiFsBldFy .L_small_initial_partial_block_kkvugeyiFsBldFy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kkvugeyiFsBldFy: orq %r8,%r8 je .L_after_reduction_kkvugeyiFsBldFy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kkvugeyiFsBldFy: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_16_pnzuldcucuyingq: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_nnArmAxpgvlqCpA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_nnArmAxpgvlqCpA .L_16_blocks_overflow_nnArmAxpgvlqCpA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_nnArmAxpgvlqCpA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 
vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm17 vpshufb %zmm29,%zmm3,%zmm19 vpshufb %zmm29,%zmm4,%zmm20 vpshufb %zmm29,%zmm5,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_uqdvluxFgGqdFqv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 
98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uqdvluxFgGqdFqv: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_uqdvluxFgGqdFqv: jmp .L_last_blocks_done_pnzuldcucuyingq .L_last_num_blocks_is_0_pnzuldcucuyingq: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_pnzuldcucuyingq: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_ralurfzeatcGxDF .L_message_below_equal_16_blocks_ralurfzeatcGxDF: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_hdjaAabmubhzgrE jl .L_small_initial_num_blocks_is_7_1_hdjaAabmubhzgrE cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_hdjaAabmubhzgrE jl .L_small_initial_num_blocks_is_11_9_hdjaAabmubhzgrE cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_hdjaAabmubhzgrE cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_hdjaAabmubhzgrE cmpq $14,%r12 je .L_small_initial_num_blocks_is_14_hdjaAabmubhzgrE jmp .L_small_initial_num_blocks_is_13_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_11_9_hdjaAabmubhzgrE: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_hdjaAabmubhzgrE cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_hdjaAabmubhzgrE jmp .L_small_initial_num_blocks_is_9_hdjaAabmubhzgrE 
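# Short-message dispatch continues: for inputs of at most 16 blocks total, the
# ladder above and below jumps to a per-count stub. Each stub encrypts the
# needed counter blocks, XORs the keystream against the input with masked
# loads/stores for the trailing partial block, and folds the byte-swapped
# result into the GHASH state, deferring the trailing partial block's hash
# when the length is not a multiple of 16.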
.L_small_initial_num_blocks_is_7_1_hdjaAabmubhzgrE: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_hdjaAabmubhzgrE jl .L_small_initial_num_blocks_is_3_1_hdjaAabmubhzgrE cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_hdjaAabmubhzgrE cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_hdjaAabmubhzgrE jmp .L_small_initial_num_blocks_is_5_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_3_1_hdjaAabmubhzgrE: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_hdjaAabmubhzgrE cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_1_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm0,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_oglzypDCtpAhyGa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_oglzypDCtpAhyGa .L_small_initial_partial_block_oglzypDCtpAhyGa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_oglzypDCtpAhyGa .L_small_initial_compute_done_oglzypDCtpAhyGa: .L_after_reduction_oglzypDCtpAhyGa: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_2_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 
98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm0,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mwbBGGvalpfhfnw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mwbBGGvalpfhfnw .L_small_initial_partial_block_mwbBGGvalpfhfnw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mwbBGGvalpfhfnw: orq %r8,%r8 je .L_after_reduction_mwbBGGvalpfhfnw vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_mwbBGGvalpfhfnw: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_3_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 
98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_njdmEDjqDqutzfl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_njdmEDjqDqutzfl .L_small_initial_partial_block_njdmEDjqDqutzfl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_njdmEDjqDqutzfl: orq %r8,%r8 je .L_after_reduction_njdmEDjqDqutzfl vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_njdmEDjqDqutzfl: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_4_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 
vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EsFwhCqwxAhrvFa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EsFwhCqwxAhrvFa .L_small_initial_partial_block_EsFwhCqwxAhrvFa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EsFwhCqwxAhrvFa: orq %r8,%r8 je .L_after_reduction_EsFwhCqwxAhrvFa vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_EsFwhCqwxAhrvFa: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_5_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 
vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %xmm29,%xmm3,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aiubnzDkbAjBaGt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aiubnzDkbAjBaGt .L_small_initial_partial_block_aiubnzDkbAjBaGt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aiubnzDkbAjBaGt: orq %r8,%r8 je .L_after_reduction_aiubnzDkbAjBaGt vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_aiubnzDkbAjBaGt: jmp 
.L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_6_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %ymm29,%ymm3,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GodgzzxioGrdAeg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GodgzzxioGrdAeg .L_small_initial_partial_block_GodgzzxioGrdAeg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 
.byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GodgzzxioGrdAeg: orq %r8,%r8 je .L_after_reduction_GodgzzxioGrdAeg vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_GodgzzxioGrdAeg: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_7_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jmqmzBeujCAjAxl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq 
%zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jmqmzBeujCAjAxl .L_small_initial_partial_block_jmqmzBeujCAjAxl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jmqmzBeujCAjAxl: orq %r8,%r8 je .L_after_reduction_jmqmzBeujCAjAxl vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_jmqmzBeujCAjAxl: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_8_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 
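// Each vbroadcastf64x2 N(%rdi) in this stanza broadcasts one 128-bit round key to
// all four lanes of a zmm register, and the paired .byte lines apply that AES
// round to the counter blocks. Round keys are read at 16-byte offsets 0 through
// 224 (initial whitening, 13 middle rounds, then the last round), consistent with
// the AES-256 schedule used on this path. A rough, illustrative C-intrinsics
// sketch of one round of the pattern (the names below are not from the generated
// code):
//
//   __m512i rk = _mm512_broadcast_i64x2(
//       _mm_loadu_si128((const __m128i *)(key_schedule + off)));
//   ctr_lo = _mm512_aesenc_epi128(ctr_lo, rk);   // four counter blocks per register
//   ctr_hi = _mm512_aesenc_epi128(ctr_hi, rk);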
vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lGwzbjEigiuyrxp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lGwzbjEigiuyrxp .L_small_initial_partial_block_lGwzbjEigiuyrxp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lGwzbjEigiuyrxp: orq %r8,%r8 je .L_after_reduction_lGwzbjEigiuyrxp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_lGwzbjEigiuyrxp: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_9_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 
64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %xmm29,%xmm4,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wtbpkxoFvlcvhkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_wtbpkxoFvlcvhkk .L_small_initial_partial_block_wtbpkxoFvlcvhkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wtbpkxoFvlcvhkk: orq %r8,%r8 je .L_after_reduction_wtbpkxoFvlcvhkk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_wtbpkxoFvlcvhkk: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_10_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 
98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %ymm29,%ymm4,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zDgaEerElzafAjF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zDgaEerElzafAjF .L_small_initial_partial_block_zDgaEerElzafAjF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zDgaEerElzafAjF: orq %r8,%r8 je .L_after_reduction_zDgaEerElzafAjF vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_zDgaEerElzafAjF: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_11_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BrkzfboGqlhyAvb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq 
%xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BrkzfboGqlhyAvb .L_small_initial_partial_block_BrkzfboGqlhyAvb: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BrkzfboGqlhyAvb: orq %r8,%r8 je .L_after_reduction_BrkzfboGqlhyAvb vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_BrkzfboGqlhyAvb: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_12_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 
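// Each .L_small_initial_num_blocks_is_N_* stanza in this region handles a short
// tail of N 16-byte blocks: it derives the needed big-endian counters from the
// saved counter block, runs them through AES, XORs them with the input (the final
// partial 64-byte chunk is loaded and stored under the byte mask in %k1, taken
// from byte64_len_to_mask_table), then byte-reflects the resulting ciphertext and
// folds it into the GHASH accumulator kept in %xmm14.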
vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wBuxadGqDBDeard subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wBuxadGqDBDeard .L_small_initial_partial_block_wBuxadGqDBDeard: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq 
%xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wBuxadGqDBDeard: orq %r8,%r8 je .L_after_reduction_wBuxadGqDBDeard vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_wBuxadGqDBDeard: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_13_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb 
%zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %xmm29,%xmm5,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_koBzdarsEboqwan subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_koBzdarsEboqwan .L_small_initial_partial_block_koBzdarsEboqwan: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_koBzdarsEboqwan: orq %r8,%r8 je .L_after_reduction_koBzdarsEboqwan vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_koBzdarsEboqwan: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_14_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq 
%r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %ymm29,%ymm5,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EhgwDyGvdzvgvtp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 
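// GHASH accumulate-and-reduce: vpternlogq with immediate 0x96 is a three-way XOR
// (result bit = a ^ b ^ c), used to fold the low/middle/high carry-less products
// of the data blocks with the matching powers of H, and the multiplications by
// the POLY2 constant perform the reduction modulo the GCM polynomial
// x^128 + x^7 + x^2 + x + 1, leaving the updated hash value in %xmm14.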
vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EhgwDyGvdzvgvtp .L_small_initial_partial_block_EhgwDyGvdzvgvtp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EhgwDyGvdzvgvtp: orq %r8,%r8 je .L_after_reduction_EhgwDyGvdzvgvtp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_EhgwDyGvdzvgvtp: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_15_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 
98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dguvDqtayFqucCq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dguvDqtayFqucCq .L_small_initial_partial_block_dguvDqtayFqucCq: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dguvDqtayFqucCq: orq %r8,%r8 je .L_after_reduction_dguvDqtayFqucCq vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_dguvDqtayFqucCq: jmp .L_small_initial_blocks_encrypted_hdjaAabmubhzgrE .L_small_initial_num_blocks_is_16_hdjaAabmubhzgrE: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm0,%zmm6 vpshufb %zmm29,%zmm3,%zmm7 vpshufb %zmm29,%zmm4,%zmm10 vpshufb %zmm29,%zmm5,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_mlladecCGcaEame: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mlladecCGcaEame: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_mlladecCGcaEame: 
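// The encrypt routine finishes below: the updated counter block is stored back to
// 0(%rsi) and the byte-swapped GHASH state to 64(%rsi), the on-stack copies of the
// hash-key powers are zeroed when the cmpq $256,%r8 check indicates they were
// populated, and the callee-saved registers are restored before returning.
//
// aes_gcm_decrypt_avx512 then begins with the matching prologue (push/CFI/SEH
// labels and an aligned stack frame) and dispatches on the round count stored at
// 240(%rdi) (9, 11 or 13) to the AES-128, AES-192 or AES-256 decrypt path.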
.L_small_initial_blocks_encrypted_hdjaAabmubhzgrE: .L_ghash_done_ralurfzeatcGxDF: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_ralurfzeatcGxDF: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_ralurfzeatcGxDF: jmp .Lexit_gcm_encrypt .Lexit_gcm_encrypt: cmpq $256,%r8 jbe .Lskip_hkeys_cleanup_cccrurCdlggtEnk vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) .Lskip_hkeys_cleanup_cccrurCdlggtEnk: vzeroupper leaq (%rbp),%rsp .cfi_def_cfa_register %rsp popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx .byte 0xf3,0xc3 .Lencrypt_seh_end: .cfi_endproc .size aes_gcm_encrypt_avx512, .-aes_gcm_encrypt_avx512 .globl aes_gcm_decrypt_avx512 .hidden aes_gcm_decrypt_avx512 .type aes_gcm_decrypt_avx512,@function .align 32 aes_gcm_decrypt_avx512: .cfi_startproc .Ldecrypt_seh_begin: .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 .Ldecrypt_seh_push_rbx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 .Ldecrypt_seh_push_rbp: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .Ldecrypt_seh_push_r12: pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .Ldecrypt_seh_push_r13: pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .Ldecrypt_seh_push_r14: pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Ldecrypt_seh_push_r15: leaq 0(%rsp),%rbp .cfi_def_cfa_register %rbp .Ldecrypt_seh_setfp: .Ldecrypt_seh_prolog_end: subq $1588,%rsp andq $(-64),%rsp movl 240(%rdi),%eax cmpl $9,%eax je .Laes_gcm_decrypt_128_avx512 cmpl $11,%eax je .Laes_gcm_decrypt_192_avx512 cmpl $13,%eax je .Laes_gcm_decrypt_256_avx512 xorl %eax,%eax jmp .Lexit_gcm_decrypt .align 32 .Laes_gcm_decrypt_128_avx512: orq %r8,%r8 je .L_enc_dec_abort_icBhFhCkojGgnBc xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_Cwuafefseqcgife movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_Cwuafefseqcgife subq %r13,%r12 .L_no_extra_mask_Cwuafefseqcgife: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_Cwuafefseqcgife .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_Cwuafefseqcgife .L_partial_incomplete_Cwuafefseqcgife: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_Cwuafefseqcgife: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_Cwuafefseqcgife: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_icBhFhCkojGgnBc cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_icBhFhCkojGgnBc vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_DkBvliAEspzoabf vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_DkBvliAEspzoabf .L_next_16_overflow_DkBvliAEspzoabf: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_DkBvliAEspzoabf: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz 
.L_skip_hkeys_precomputation_yDAnEECuuGxfwvr vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_yDAnEECuuGxfwvr: cmpq $512,%r8 jb .L_message_below_32_blocks_icBhFhCkojGgnBc cmpb $240,%r15b jae .L_next_16_overflow_lgmzdneusufrFmr vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_lgmzdneusufrFmr .L_next_16_overflow_lgmzdneusufrFmr: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_lgmzdneusufrFmr: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_ecaeaEFhspgwivG vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 
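/*
 * Descriptive note: this region appears to extend the cached table of
 * hash-key powers for the decrypt path. Each group of .byte-encoded
 * vpclmulqdq instructions forms a 256-bit GHASH product, and the POLY2
 * constant below folds it back to 128 bits; the reduced results are spilled
 * to the stack from 448(%rsp) down to 0(%rsp) for use by the stitched main
 * loop.
 */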
vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 
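/*
 * Descriptive note: once the key powers are cached, the main stitched loop
 * (.L_encrypt_big_nblocks_*) below appears to process 768 bytes (48 blocks,
 * three groups of 16) per iteration, interleaving the AES-CTR rounds with
 * GHASH multiplications over previously processed, byte-swapped blocks that
 * are kept in the 768..1472(%rsp) save areas.
 */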
vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_ecaeaEFhspgwivG: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_icBhFhCkojGgnBc .L_encrypt_big_nblocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae .L_16_blocks_overflow_ApzaumldtosGeir vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ApzaumldtosGeir .L_16_blocks_overflow_ApzaumldtosGeir: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ApzaumldtosGeir: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_ubdpEpmjBbFwdEm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ubdpEpmjBbFwdEm .L_16_blocks_overflow_ubdpEpmjBbFwdEm: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ubdpEpmjBbFwdEm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_EdBasfawgBetkCB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_EdBasfawgBetkCB .L_16_blocks_overflow_EdBasfawgBetkCB: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_EdBasfawgBetkCB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_icBhFhCkojGgnBc .L_no_more_big_nblocks_icBhFhCkojGgnBc: cmpq $512,%r8 jae .L_encrypt_32_blocks_icBhFhCkojGgnBc cmpq $256,%r8 jae .L_encrypt_16_blocks_icBhFhCkojGgnBc .L_encrypt_0_blocks_ghash_32_icBhFhCkojGgnBc: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_lGwozrmByuyygbo cmpl $8,%r10d je .L_last_num_blocks_is_8_lGwozrmByuyygbo jb .L_last_num_blocks_is_7_1_lGwozrmByuyygbo cmpl $12,%r10d je .L_last_num_blocks_is_12_lGwozrmByuyygbo jb .L_last_num_blocks_is_11_9_lGwozrmByuyygbo cmpl $15,%r10d je .L_last_num_blocks_is_15_lGwozrmByuyygbo ja .L_last_num_blocks_is_16_lGwozrmByuyygbo cmpl $14,%r10d je .L_last_num_blocks_is_14_lGwozrmByuyygbo jmp .L_last_num_blocks_is_13_lGwozrmByuyygbo .L_last_num_blocks_is_11_9_lGwozrmByuyygbo: cmpl $10,%r10d je .L_last_num_blocks_is_10_lGwozrmByuyygbo ja .L_last_num_blocks_is_11_lGwozrmByuyygbo jmp 
.L_last_num_blocks_is_9_lGwozrmByuyygbo .L_last_num_blocks_is_7_1_lGwozrmByuyygbo: cmpl $4,%r10d je .L_last_num_blocks_is_4_lGwozrmByuyygbo jb .L_last_num_blocks_is_3_1_lGwozrmByuyygbo cmpl $6,%r10d ja .L_last_num_blocks_is_7_lGwozrmByuyygbo je .L_last_num_blocks_is_6_lGwozrmByuyygbo jmp .L_last_num_blocks_is_5_lGwozrmByuyygbo .L_last_num_blocks_is_3_1_lGwozrmByuyygbo: cmpl $2,%r10d ja .L_last_num_blocks_is_3_lGwozrmByuyygbo je .L_last_num_blocks_is_2_lGwozrmByuyygbo .L_last_num_blocks_is_1_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_lClAAkfGiaxqtqb vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_lClAAkfGiaxqtqb .L_16_blocks_overflow_lClAAkfGiaxqtqb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_lClAAkfGiaxqtqb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_AljhFopbDmohEEm subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AljhFopbDmohEEm .L_small_initial_partial_block_AljhFopbDmohEEm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_AljhFopbDmohEEm .L_small_initial_compute_done_AljhFopbDmohEEm: .L_after_reduction_AljhFopbDmohEEm: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_2_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_dxfcclgCzfhujoB vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_dxfcclgCzfhujoB .L_16_blocks_overflow_dxfcclgCzfhujoB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_dxfcclgCzfhujoB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mhEghxGxhmrFGgF 
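/*
 * Descriptive note on the tail handlers (.L_last_num_blocks_is_1 .. _16):
 * each one encrypts the remaining 1..16 counter blocks, using
 * byte64_len_to_mask_table/%k1 for a masked load and store of the final,
 * possibly short, 64-byte group, then folds the outstanding GHASH state.
 * A full final block takes the "compute_done" path; a short one takes
 * "small_initial_partial_block", which evidently records the leftover byte
 * count at (%rdx) and stashes the last block's state at 16(%rsi) so a later
 * call can complete the partial block.
 */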
subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mhEghxGxhmrFGgF .L_small_initial_partial_block_mhEghxGxhmrFGgF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mhEghxGxhmrFGgF: orq %r8,%r8 je .L_after_reduction_mhEghxGxhmrFGgF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mhEghxGxhmrFGgF: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_3_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_GzfdDtolkqgqFel vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_GzfdDtolkqgqFel .L_16_blocks_overflow_GzfdDtolkqgqFel: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_GzfdDtolkqgqFel: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 
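/*
 * Descriptive note: the {%k1}{z} masked, zero-filling load below reads only
 * the bytes that remain in the message, so the trailing AES/GHASH work never
 * touches memory past the end of the ciphertext buffer.
 */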
vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qwrgpmqrxkxvCzs subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qwrgpmqrxkxvCzs .L_small_initial_partial_block_qwrgpmqrxkxvCzs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qwrgpmqrxkxvCzs: orq %r8,%r8 je .L_after_reduction_qwrgpmqrxkxvCzs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qwrgpmqrxkxvCzs: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_4_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_tFlldonsxgiBeEi vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_tFlldonsxgiBeEi .L_16_blocks_overflow_tFlldonsxgiBeEi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_tFlldonsxgiBeEi: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gDoehbqfcmrseCg subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gDoehbqfcmrseCg .L_small_initial_partial_block_gDoehbqfcmrseCg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gDoehbqfcmrseCg: orq %r8,%r8 je .L_after_reduction_gDoehbqfcmrseCg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gDoehbqfcmrseCg: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_5_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_lAbhcGDwivukqtE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_lAbhcGDwivukqtE .L_16_blocks_overflow_lAbhcGDwivukqtE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_lAbhcGDwivukqtE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ewowyEuhltFopkj subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 
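/*
 * Descriptive note: GHASH for this tail reads its hash-key powers from the
 * context table addressed off %r10 (= ctx + 80), here 176(%r10)..240(%r10);
 * the partial products are accumulated with vpternlogq before the usual
 * POLY2 reduction into %xmm14.
 */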
.byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ewowyEuhltFopkj .L_small_initial_partial_block_ewowyEuhltFopkj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ewowyEuhltFopkj: orq %r8,%r8 je .L_after_reduction_ewowyEuhltFopkj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ewowyEuhltFopkj: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_6_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_xsoFcrclantxpei vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_xsoFcrclantxpei .L_16_blocks_overflow_xsoFcrclantxpei: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_xsoFcrclantxpei: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 
98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lzfnkiFifvcmjit subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lzfnkiFifvcmjit .L_small_initial_partial_block_lzfnkiFifvcmjit: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lzfnkiFifvcmjit: orq %r8,%r8 je .L_after_reduction_lzfnkiFifvcmjit vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lzfnkiFifvcmjit: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_7_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_xeeduBscFEzvdva vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_xeeduBscFEzvdva .L_16_blocks_overflow_xeeduBscFEzvdva: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_xeeduBscFEzvdva: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DrhotzwvddbqFrj subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DrhotzwvddbqFrj .L_small_initial_partial_block_DrhotzwvddbqFrj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DrhotzwvddbqFrj: orq %r8,%r8 je .L_after_reduction_DrhotzwvddbqFrj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DrhotzwvddbqFrj: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_8_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_kBlrofzDjhoFnxv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_kBlrofzDjhoFnxv .L_16_blocks_overflow_kBlrofzDjhoFnxv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_kBlrofzDjhoFnxv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 
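/*
 * Descriptive note on the counter checks: each tail handler first compares
 * %r15b (the low byte of the big-endian counter) against 256-N, e.g.
 * cmpl $248 for the 8-block case. If adding N would wrap that byte, the
 * overflow path byte-swaps the counter, performs the additions in
 * little-endian form with ddq_add_1234/ddq_add_4444, and swaps back;
 * otherwise the big-endian ddq_addbe constants are applied directly.
 */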
.byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pAdxDizkcbwmjry subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pAdxDizkcbwmjry .L_small_initial_partial_block_pAdxDizkcbwmjry: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pAdxDizkcbwmjry: orq %r8,%r8 je .L_after_reduction_pAdxDizkcbwmjry vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pAdxDizkcbwmjry: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_9_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_whsqDBkDGaknbAC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_whsqDBkDGaknbAC .L_16_blocks_overflow_whsqDBkDGaknbAC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_whsqDBkDGaknbAC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ranhBavDwnwbdEt subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ranhBavDwnwbdEt .L_small_initial_partial_block_ranhBavDwnwbdEt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ranhBavDwnwbdEt: orq %r8,%r8 je .L_after_reduction_ranhBavDwnwbdEt vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ranhBavDwnwbdEt: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_10_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_GbBbalFokmrvvlx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp 
.L_16_blocks_ok_GbBbalFokmrvvlx .L_16_blocks_overflow_GbBbalFokmrvvlx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_GbBbalFokmrvvlx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vyxjFnxqwhAbeyi subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq 
%zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vyxjFnxqwhAbeyi .L_small_initial_partial_block_vyxjFnxqwhAbeyi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vyxjFnxqwhAbeyi: orq %r8,%r8 je .L_after_reduction_vyxjFnxqwhAbeyi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vyxjFnxqwhAbeyi: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_11_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_ldEsDEbywdmplpt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_ldEsDEbywdmplpt .L_16_blocks_overflow_ldEsDEbywdmplpt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_ldEsDEbywdmplpt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 
98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dwbzkfjluwpFvvF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dwbzkfjluwpFvvF .L_small_initial_partial_block_dwbzkfjluwpFvvF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dwbzkfjluwpFvvF: orq %r8,%r8 je .L_after_reduction_dwbzkfjluwpFvvF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dwbzkfjluwpFvvF: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_12_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_rAsEscwvsFrjwEn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_rAsEscwvsFrjwEn .L_16_blocks_overflow_rAsEscwvsFrjwEn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_rAsEscwvsFrjwEn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qqseyimgvencorf subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qqseyimgvencorf .L_small_initial_partial_block_qqseyimgvencorf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qqseyimgvencorf: orq %r8,%r8 je .L_after_reduction_qqseyimgvencorf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qqseyimgvencorf: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_13_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_yuCmdhwEwEhlsnq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_yuCmdhwEwEhlsnq .L_16_blocks_overflow_yuCmdhwEwEhlsnq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_yuCmdhwEwEhlsnq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 
128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qwyeDgkiECyoEct subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qwyeDgkiECyoEct .L_small_initial_partial_block_qwyeDgkiECyoEct: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qwyeDgkiECyoEct: orq %r8,%r8 je .L_after_reduction_qwyeDgkiECyoEct vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qwyeDgkiECyoEct: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_14_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_oEwrswoqGyjlsqe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_oEwrswoqGyjlsqe .L_16_blocks_overflow_oEwrswoqGyjlsqe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_oEwrswoqGyjlsqe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 
192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FvgcfpdwFDaojDh subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FvgcfpdwFDaojDh .L_small_initial_partial_block_FvgcfpdwFDaojDh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 
176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FvgcfpdwFDaojDh: orq %r8,%r8 je .L_after_reduction_FvgcfpdwFDaojDh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FvgcfpdwFDaojDh: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_15_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_CtjhmwDvAgBsAry vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_CtjhmwDvAgBsAry .L_16_blocks_overflow_CtjhmwDvAgBsAry: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_CtjhmwDvAgBsAry: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rertkxjeyegEbAD subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rertkxjeyegEbAD .L_small_initial_partial_block_rertkxjeyegEbAD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 
98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rertkxjeyegEbAD: orq %r8,%r8 je .L_after_reduction_rertkxjeyegEbAD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rertkxjeyegEbAD: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_16_lGwozrmByuyygbo: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_ejwsGBcDyFeryCA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ejwsGBcDyFeryCA .L_16_blocks_overflow_ejwsGBcDyFeryCA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ejwsGBcDyFeryCA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_vqjlBldpifEzCAi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vqjlBldpifEzCAi: 
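/* Note (editorial assumption, not emitted by the generator): the vpxorq that follows
   appears to fold the final byte-reflected data block (%xmm7) into the GHASH
   accumulator (%xmm14) after the reduction against POLY2 above, completing the
   16-block tail of this AVX-512 VAES/VPCLMULQDQ GCM path. */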
vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vqjlBldpifEzCAi: jmp .L_last_blocks_done_lGwozrmByuyygbo .L_last_num_blocks_is_0_lGwozrmByuyygbo: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_lGwozrmByuyygbo: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_icBhFhCkojGgnBc .L_encrypt_32_blocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae .L_16_blocks_overflow_bqdrbusADEaesxh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_bqdrbusADEaesxh .L_16_blocks_overflow_bqdrbusADEaesxh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_bqdrbusADEaesxh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_jsiAuvqcAwfrdty vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_jsiAuvqcAwfrdty .L_16_blocks_overflow_jsiAuvqcAwfrdty: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_jsiAuvqcAwfrdty: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 
98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq 
%xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_aldutenGmyuhmFz cmpl $8,%r10d je .L_last_num_blocks_is_8_aldutenGmyuhmFz jb .L_last_num_blocks_is_7_1_aldutenGmyuhmFz cmpl $12,%r10d je .L_last_num_blocks_is_12_aldutenGmyuhmFz jb .L_last_num_blocks_is_11_9_aldutenGmyuhmFz cmpl $15,%r10d je .L_last_num_blocks_is_15_aldutenGmyuhmFz ja .L_last_num_blocks_is_16_aldutenGmyuhmFz cmpl $14,%r10d je .L_last_num_blocks_is_14_aldutenGmyuhmFz jmp .L_last_num_blocks_is_13_aldutenGmyuhmFz .L_last_num_blocks_is_11_9_aldutenGmyuhmFz: cmpl $10,%r10d je .L_last_num_blocks_is_10_aldutenGmyuhmFz ja .L_last_num_blocks_is_11_aldutenGmyuhmFz jmp .L_last_num_blocks_is_9_aldutenGmyuhmFz .L_last_num_blocks_is_7_1_aldutenGmyuhmFz: cmpl $4,%r10d je .L_last_num_blocks_is_4_aldutenGmyuhmFz jb .L_last_num_blocks_is_3_1_aldutenGmyuhmFz cmpl $6,%r10d ja .L_last_num_blocks_is_7_aldutenGmyuhmFz je .L_last_num_blocks_is_6_aldutenGmyuhmFz jmp .L_last_num_blocks_is_5_aldutenGmyuhmFz .L_last_num_blocks_is_3_1_aldutenGmyuhmFz: cmpl $2,%r10d ja .L_last_num_blocks_is_3_aldutenGmyuhmFz je .L_last_num_blocks_is_2_aldutenGmyuhmFz .L_last_num_blocks_is_1_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_kounvuokEjmfgDl vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_kounvuokEjmfgDl .L_16_blocks_overflow_kounvuokEjmfgDl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_kounvuokEjmfgDl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq 
%r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_myyjGGFduxDnmrl subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_myyjGGFduxDnmrl .L_small_initial_partial_block_myyjGGFduxDnmrl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_myyjGGFduxDnmrl .L_small_initial_compute_done_myyjGGFduxDnmrl: .L_after_reduction_myyjGGFduxDnmrl: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_2_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_GkcjorkhgDBFApE vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_GkcjorkhgDBFApE .L_16_blocks_overflow_GkcjorkhgDBFApE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_GkcjorkhgDBFApE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 
0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_luGrBrcBwGbkypf subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_luGrBrcBwGbkypf .L_small_initial_partial_block_luGrBrcBwGbkypf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_luGrBrcBwGbkypf: orq %r8,%r8 je .L_after_reduction_luGrBrcBwGbkypf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_luGrBrcBwGbkypf: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_3_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_GlGoAfCtaxDnccC vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_GlGoAfCtaxDnccC .L_16_blocks_overflow_GlGoAfCtaxDnccC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_GlGoAfCtaxDnccC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 
32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hjFElydehDprmun subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hjFElydehDprmun .L_small_initial_partial_block_hjFElydehDprmun: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hjFElydehDprmun: orq %r8,%r8 je .L_after_reduction_hjFElydehDprmun vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hjFElydehDprmun: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_4_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_zwfpgGyijsBkpeE vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_zwfpgGyijsBkpeE .L_16_blocks_overflow_zwfpgGyijsBkpeE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_zwfpgGyijsBkpeE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ftaDveFCagABhCd subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ftaDveFCagABhCd 
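# Note (a reading of the generated code): the .L_small_initial_partial_block_*
# branch below covers a final block shorter than 16 bytes.  It stores the
# residual byte count through %rdx and the last output block at 16(%rsi), then
# multiplies the buffered, byte-reflected ciphertext by the matching powers of
# H from the table addressed by %r10 before the usual POLY2 reduction.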
.L_small_initial_partial_block_ftaDveFCagABhCd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ftaDveFCagABhCd: orq %r8,%r8 je .L_after_reduction_ftaDveFCagABhCd vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ftaDveFCagABhCd: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_5_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_CizAwbkEgozyasa vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_CizAwbkEgozyasa .L_16_blocks_overflow_CizAwbkEgozyasa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_CizAwbkEgozyasa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_uvigeFCkFhxrjol subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_uvigeFCkFhxrjol .L_small_initial_partial_block_uvigeFCkFhxrjol: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uvigeFCkFhxrjol: orq %r8,%r8 je .L_after_reduction_uvigeFCkFhxrjol vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_uvigeFCkFhxrjol: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_6_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_yuzbpkwFyzjuBAz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_yuzbpkwFyzjuBAz .L_16_blocks_overflow_yuzbpkwFyzjuBAz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_yuzbpkwFyzjuBAz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 
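# Note: the .byte 98,... sequences are manually EVEX-encoded instructions
# (0x62 prefix); opcode bytes 220/221 correspond to vaesenc/vaesenclast and 68
# to vpclmulqdq, emitted as raw bytes presumably so the file assembles with
# toolchains lacking the VAES/VPCLMULQDQ mnemonics.  The AES rounds here are
# interleaved with GHASH multiplies of previously buffered ciphertext.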
vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mGnxEwEsoAvgkoh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mGnxEwEsoAvgkoh .L_small_initial_partial_block_mGnxEwEsoAvgkoh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 
240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mGnxEwEsoAvgkoh: orq %r8,%r8 je .L_after_reduction_mGnxEwEsoAvgkoh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mGnxEwEsoAvgkoh: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_7_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_fDccaFllCyjzgaw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_fDccaFllCyjzgaw .L_16_blocks_overflow_fDccaFllCyjzgaw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_fDccaFllCyjzgaw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_awcpyfsBbqeAyhp subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_awcpyfsBbqeAyhp .L_small_initial_partial_block_awcpyfsBbqeAyhp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_awcpyfsBbqeAyhp: orq %r8,%r8 je .L_after_reduction_awcpyfsBbqeAyhp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_awcpyfsBbqeAyhp: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_8_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_yuxjCAwGGjlocDt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_yuxjCAwGGjlocDt .L_16_blocks_overflow_yuxjCAwGGjlocDt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_yuxjCAwGGjlocDt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 
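# Note: in these tail paths the remaining length in %r8 is converted to a byte
# mask via byte64_len_to_mask_table and loaded into %k1, so the masked
# vmovdqu8 ...{%k1} loads and stores only touch the valid trailing bytes; the
# preceding cmpl/jae appears to divert to the _16_blocks_overflow_ variant
# when the low byte of the counter in %r15 would wrap.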
vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tCmmipfvAEinBtG subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tCmmipfvAEinBtG .L_small_initial_partial_block_tCmmipfvAEinBtG: 
movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tCmmipfvAEinBtG: orq %r8,%r8 je .L_after_reduction_tCmmipfvAEinBtG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tCmmipfvAEinBtG: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_9_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_FrborCeuBByFkga vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_FrborCeuBByFkga .L_16_blocks_overflow_FrborCeuBByFkga: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_FrborCeuBByFkga: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rhmklrqdhsjaixG subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rhmklrqdhsjaixG .L_small_initial_partial_block_rhmklrqdhsjaixG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rhmklrqdhsjaixG: orq %r8,%r8 je .L_after_reduction_rhmklrqdhsjaixG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rhmklrqdhsjaixG: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_10_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_uqpvEzAtlprmDsg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_uqpvEzAtlprmDsg .L_16_blocks_overflow_uqpvEzAtlprmDsg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_uqpvEzAtlprmDsg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 
%ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yslffbaddFCEqwA subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yslffbaddFCEqwA .L_small_initial_partial_block_yslffbaddFCEqwA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yslffbaddFCEqwA: orq %r8,%r8 je .L_after_reduction_yslffbaddFCEqwA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yslffbaddFCEqwA: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_11_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_wyBrnxyfcdFguiF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_wyBrnxyfcdFguiF .L_16_blocks_overflow_wyBrnxyfcdFguiF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_wyBrnxyfcdFguiF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_axnFjCbcEhxjDmF subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_axnFjCbcEhxjDmF .L_small_initial_partial_block_axnFjCbcEhxjDmF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_axnFjCbcEhxjDmF: orq %r8,%r8 je .L_after_reduction_axnFjCbcEhxjDmF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_axnFjCbcEhxjDmF: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_12_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_nbfsGzmFjniAhpc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_nbfsGzmFjniAhpc .L_16_blocks_overflow_nbfsGzmFjniAhpc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_nbfsGzmFjniAhpc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 
vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nmuwjreDfxCetjh subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nmuwjreDfxCetjh .L_small_initial_partial_block_nmuwjreDfxCetjh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nmuwjreDfxCetjh: orq %r8,%r8 je .L_after_reduction_nmuwjreDfxCetjh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nmuwjreDfxCetjh: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_13_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_BlpixnjkGtBtzBl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_BlpixnjkGtBtzBl .L_16_blocks_overflow_BlpixnjkGtBtzBl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_BlpixnjkGtBtzBl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 
98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FdqyplwEjyoxvwf subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 
vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FdqyplwEjyoxvwf .L_small_initial_partial_block_FdqyplwEjyoxvwf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FdqyplwEjyoxvwf: orq %r8,%r8 je .L_after_reduction_FdqyplwEjyoxvwf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FdqyplwEjyoxvwf: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_14_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_nlkisqljGgnlewr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_nlkisqljGgnlewr .L_16_blocks_overflow_nlkisqljGgnlewr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_nlkisqljGgnlewr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ByszoDfuCgvEska subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ByszoDfuCgvEska .L_small_initial_partial_block_ByszoDfuCgvEska: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ByszoDfuCgvEska: orq %r8,%r8 je .L_after_reduction_ByszoDfuCgvEska vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ByszoDfuCgvEska: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_15_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_FewkqxwDmrjetmG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_FewkqxwDmrjetmG .L_16_blocks_overflow_FewkqxwDmrjetmG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_FewkqxwDmrjetmG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jjbxCtvydaGqepC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 
.byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jjbxCtvydaGqepC .L_small_initial_partial_block_jjbxCtvydaGqepC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jjbxCtvydaGqepC: orq %r8,%r8 je .L_after_reduction_jjbxCtvydaGqepC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jjbxCtvydaGqepC: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_16_aldutenGmyuhmFz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_hEoxzbghGBmpbpw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_hEoxzbghGBmpbpw .L_16_blocks_overflow_hEoxzbghGBmpbpw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_hEoxzbghGBmpbpw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 
960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_usEFihDgqghhogg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 
vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_usEFihDgqghhogg: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_usEFihDgqghhogg: jmp .L_last_blocks_done_aldutenGmyuhmFz .L_last_num_blocks_is_0_aldutenGmyuhmFz: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_aldutenGmyuhmFz: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_icBhFhCkojGgnBc .L_encrypt_16_blocks_icBhFhCkojGgnBc: cmpb $240,%r15b jae .L_16_blocks_overflow_xlvtosuhBBytzsd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_xlvtosuhBBytzsd .L_16_blocks_overflow_xlvtosuhBBytzsd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_xlvtosuhBBytzsd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CqqjsGobDovpiom cmpl $8,%r10d je .L_last_num_blocks_is_8_CqqjsGobDovpiom jb 
.L_last_num_blocks_is_7_1_CqqjsGobDovpiom cmpl $12,%r10d je .L_last_num_blocks_is_12_CqqjsGobDovpiom jb .L_last_num_blocks_is_11_9_CqqjsGobDovpiom cmpl $15,%r10d je .L_last_num_blocks_is_15_CqqjsGobDovpiom ja .L_last_num_blocks_is_16_CqqjsGobDovpiom cmpl $14,%r10d je .L_last_num_blocks_is_14_CqqjsGobDovpiom jmp .L_last_num_blocks_is_13_CqqjsGobDovpiom .L_last_num_blocks_is_11_9_CqqjsGobDovpiom: cmpl $10,%r10d je .L_last_num_blocks_is_10_CqqjsGobDovpiom ja .L_last_num_blocks_is_11_CqqjsGobDovpiom jmp .L_last_num_blocks_is_9_CqqjsGobDovpiom .L_last_num_blocks_is_7_1_CqqjsGobDovpiom: cmpl $4,%r10d je .L_last_num_blocks_is_4_CqqjsGobDovpiom jb .L_last_num_blocks_is_3_1_CqqjsGobDovpiom cmpl $6,%r10d ja .L_last_num_blocks_is_7_CqqjsGobDovpiom je .L_last_num_blocks_is_6_CqqjsGobDovpiom jmp .L_last_num_blocks_is_5_CqqjsGobDovpiom .L_last_num_blocks_is_3_1_CqqjsGobDovpiom: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CqqjsGobDovpiom je .L_last_num_blocks_is_2_CqqjsGobDovpiom .L_last_num_blocks_is_1_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_iCcBbEaCnnBtiGz vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_iCcBbEaCnnBtiGz .L_16_blocks_overflow_iCcBbEaCnnBtiGz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_iCcBbEaCnnBtiGz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq 
$4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_himCBxsCzdjqdtp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_himCBxsCzdjqdtp .L_small_initial_partial_block_himCBxsCzdjqdtp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_himCBxsCzdjqdtp .L_small_initial_compute_done_himCBxsCzdjqdtp: .L_after_reduction_himCBxsCzdjqdtp: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_2_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_spdFufbAAGcAxFf vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_spdFufbAAGcAxFf .L_16_blocks_overflow_spdFufbAAGcAxFf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_spdFufbAAGcAxFf: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 
vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vdGxihcuFDvcDGx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vdGxihcuFDvcDGx .L_small_initial_partial_block_vdGxihcuFDvcDGx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vdGxihcuFDvcDGx: orq %r8,%r8 je .L_after_reduction_vdGxihcuFDvcDGx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vdGxihcuFDvcDGx: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_3_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_sBAazunogzDjqho vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_sBAazunogzDjqho .L_16_blocks_overflow_sBAazunogzDjqho: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_sBAazunogzDjqho: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq 
%zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tldtpncdejgAGjh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tldtpncdejgAGjh .L_small_initial_partial_block_tldtpncdejgAGjh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq 
$8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tldtpncdejgAGjh: orq %r8,%r8 je .L_after_reduction_tldtpncdejgAGjh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tldtpncdejgAGjh: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_4_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_sekyjhofosAtkyB vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_sekyjhofosAtkyB .L_16_blocks_overflow_sekyjhofosAtkyB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_sekyjhofosAtkyB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 
vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wkomnalwByedats subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wkomnalwByedats .L_small_initial_partial_block_wkomnalwByedats: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wkomnalwByedats: orq %r8,%r8 je .L_after_reduction_wkomnalwByedats vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wkomnalwByedats: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_5_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_zdkGskjaniDljeq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_zdkGskjaniDljeq .L_16_blocks_overflow_zdkGskjaniDljeq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_zdkGskjaniDljeq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 
.byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nCkgxpzwqEAtDfb subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nCkgxpzwqEAtDfb .L_small_initial_partial_block_nCkgxpzwqEAtDfb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 
vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nCkgxpzwqEAtDfb: orq %r8,%r8 je .L_after_reduction_nCkgxpzwqEAtDfb vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nCkgxpzwqEAtDfb: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_6_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_mrylAcnDjuqklnd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_mrylAcnDjuqklnd .L_16_blocks_overflow_mrylAcnDjuqklnd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_mrylAcnDjuqklnd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq 
%xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dtDgucpjyaambao subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dtDgucpjyaambao .L_small_initial_partial_block_dtDgucpjyaambao: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dtDgucpjyaambao: orq %r8,%r8 je .L_after_reduction_dtDgucpjyaambao vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dtDgucpjyaambao: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_7_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_ektccsvjwlnFwnw vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_ektccsvjwlnFwnw .L_16_blocks_overflow_ektccsvjwlnFwnw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd 
ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_ektccsvjwlnFwnw: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rbfqryodaBgimfn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 
98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rbfqryodaBgimfn .L_small_initial_partial_block_rbfqryodaBgimfn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rbfqryodaBgimfn: orq %r8,%r8 je .L_after_reduction_rbfqryodaBgimfn vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rbfqryodaBgimfn: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_8_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_GGmuDhkjBtqxcEd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_GGmuDhkjBtqxcEd .L_16_blocks_overflow_GGmuDhkjBtqxcEd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_GGmuDhkjBtqxcEd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aapCFFxCFiAoabs subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aapCFFxCFiAoabs .L_small_initial_partial_block_aapCFFxCFiAoabs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 
98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aapCFFxCFiAoabs: orq %r8,%r8 je .L_after_reduction_aapCFFxCFiAoabs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aapCFFxCFiAoabs: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_9_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_lDwlixsAzhAgDkG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_lDwlixsAzhAgDkG .L_16_blocks_overflow_lDwlixsAzhAgDkG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_lDwlixsAzhAgDkG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq 
$0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ixzDxvojEApEnCt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ixzDxvojEApEnCt .L_small_initial_partial_block_ixzDxvojEApEnCt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ixzDxvojEApEnCt: orq %r8,%r8 je .L_after_reduction_ixzDxvojEApEnCt vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ixzDxvojEApEnCt: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_10_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_BbbzknmqtuDuEfg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_BbbzknmqtuDuEfg .L_16_blocks_overflow_BbbzknmqtuDuEfg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_BbbzknmqtuDuEfg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq 
%zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ocgDwclfceuanoy subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ocgDwclfceuanoy .L_small_initial_partial_block_ocgDwclfceuanoy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq 
%xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ocgDwclfceuanoy: orq %r8,%r8 je .L_after_reduction_ocgDwclfceuanoy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ocgDwclfceuanoy: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_11_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_jatgakEfrDmqCyG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_jatgakEfrDmqCyG .L_16_blocks_overflow_jatgakEfrDmqCyG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_jatgakEfrDmqCyG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 
vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tklDcEsdEdnDloA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tklDcEsdEdnDloA .L_small_initial_partial_block_tklDcEsdEdnDloA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tklDcEsdEdnDloA: orq %r8,%r8 je .L_after_reduction_tklDcEsdEdnDloA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tklDcEsdEdnDloA: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_12_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_tovGfhABebkuFEt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_tovGfhABebkuFEt .L_16_blocks_overflow_tovGfhABebkuFEt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_tovGfhABebkuFEt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 
$1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EEeschrlAysrrgg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EEeschrlAysrrgg .L_small_initial_partial_block_EEeschrlAysrrgg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
.L_small_initial_compute_done_EEeschrlAysrrgg: orq %r8,%r8 je .L_after_reduction_EEeschrlAysrrgg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EEeschrlAysrrgg: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_13_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_ChCrwqCswoEpicz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_ChCrwqCswoEpicz .L_16_blocks_overflow_ChCrwqCswoEpicz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_ChCrwqCswoEpicz: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iBgxbAnxnejeaAD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iBgxbAnxnejeaAD .L_small_initial_partial_block_iBgxbAnxnejeaAD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iBgxbAnxnejeaAD: orq %r8,%r8 je .L_after_reduction_iBgxbAnxnejeaAD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iBgxbAnxnejeaAD: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_14_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_GzibzgsizEbkyAE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_GzibzgsizEbkyAE .L_16_blocks_overflow_GzibzgsizEbkyAE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_GzibzgsizEbkyAE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq 
$0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ulArrmByoEAEezF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ulArrmByoEAEezF .L_small_initial_partial_block_ulArrmByoEAEezF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ulArrmByoEAEezF: orq %r8,%r8 je .L_after_reduction_ulArrmByoEAEezF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ulArrmByoEAEezF: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_15_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_DExqfkaBzzhxtrd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_DExqfkaBzzhxtrd .L_16_blocks_overflow_DExqfkaBzzhxtrd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_DExqfkaBzzhxtrd: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zqfGgrfeCzzwkzB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zqfGgrfeCzzwkzB .L_small_initial_partial_block_zqfGgrfeCzzwkzB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zqfGgrfeCzzwkzB: orq %r8,%r8 je .L_after_reduction_zqfGgrfeCzzwkzB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zqfGgrfeCzzwkzB: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_16_CqqjsGobDovpiom: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_fanaekDAulfkhcb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_fanaekDAulfkhcb .L_16_blocks_overflow_fanaekDAulfkhcb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_fanaekDAulfkhcb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_pCDjmBApGDgFGhw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 
98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pCDjmBApGDgFGhw: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pCDjmBApGDgFGhw: jmp .L_last_blocks_done_CqqjsGobDovpiom .L_last_num_blocks_is_0_CqqjsGobDovpiom: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CqqjsGobDovpiom: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_icBhFhCkojGgnBc .L_message_below_32_blocks_icBhFhCkojGgnBc: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_lurmstfAeByrDpz vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq 
%zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_lurmstfAeByrDpz: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_kpvFtqCzpagsbmy cmpl $8,%r10d je .L_last_num_blocks_is_8_kpvFtqCzpagsbmy jb .L_last_num_blocks_is_7_1_kpvFtqCzpagsbmy cmpl $12,%r10d je .L_last_num_blocks_is_12_kpvFtqCzpagsbmy jb .L_last_num_blocks_is_11_9_kpvFtqCzpagsbmy cmpl $15,%r10d je .L_last_num_blocks_is_15_kpvFtqCzpagsbmy ja .L_last_num_blocks_is_16_kpvFtqCzpagsbmy cmpl $14,%r10d je .L_last_num_blocks_is_14_kpvFtqCzpagsbmy jmp .L_last_num_blocks_is_13_kpvFtqCzpagsbmy .L_last_num_blocks_is_11_9_kpvFtqCzpagsbmy: cmpl $10,%r10d je .L_last_num_blocks_is_10_kpvFtqCzpagsbmy ja .L_last_num_blocks_is_11_kpvFtqCzpagsbmy jmp .L_last_num_blocks_is_9_kpvFtqCzpagsbmy .L_last_num_blocks_is_7_1_kpvFtqCzpagsbmy: cmpl $4,%r10d je .L_last_num_blocks_is_4_kpvFtqCzpagsbmy jb .L_last_num_blocks_is_3_1_kpvFtqCzpagsbmy cmpl $6,%r10d ja .L_last_num_blocks_is_7_kpvFtqCzpagsbmy je .L_last_num_blocks_is_6_kpvFtqCzpagsbmy jmp .L_last_num_blocks_is_5_kpvFtqCzpagsbmy .L_last_num_blocks_is_3_1_kpvFtqCzpagsbmy: cmpl $2,%r10d ja .L_last_num_blocks_is_3_kpvFtqCzpagsbmy je .L_last_num_blocks_is_2_kpvFtqCzpagsbmy .L_last_num_blocks_is_1_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_erjqEcdgnsabCAp vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_erjqEcdgnsabCAp .L_16_blocks_overflow_erjqEcdgnsabCAp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_erjqEcdgnsabCAp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_tihlhrngdnEcfCn subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tihlhrngdnEcfCn .L_small_initial_partial_block_tihlhrngdnEcfCn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_tihlhrngdnEcfCn .L_small_initial_compute_done_tihlhrngdnEcfCn: .L_after_reduction_tihlhrngdnEcfCn: jmp .L_last_blocks_done_kpvFtqCzpagsbmy 
.L_last_num_blocks_is_2_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_AibviGpsltwhwck vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_AibviGpsltwhwck .L_16_blocks_overflow_AibviGpsltwhwck: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_AibviGpsltwhwck: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CGytiedGuwlshAl subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CGytiedGuwlshAl .L_small_initial_partial_block_CGytiedGuwlshAl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 
vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CGytiedGuwlshAl: orq %r8,%r8 je .L_after_reduction_CGytiedGuwlshAl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CGytiedGuwlshAl: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_3_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_cwyoDiaxggCofzt vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_cwyoDiaxggCofzt .L_16_blocks_overflow_cwyoDiaxggCofzt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_cwyoDiaxggCofzt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_khdhzwEsobgrlgi subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq 
%zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_khdhzwEsobgrlgi .L_small_initial_partial_block_khdhzwEsobgrlgi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_khdhzwEsobgrlgi: orq %r8,%r8 je .L_after_reduction_khdhzwEsobgrlgi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_khdhzwEsobgrlgi: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_4_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_fqeFwlbvdGyejoA vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_fqeFwlbvdGyejoA .L_16_blocks_overflow_fqeFwlbvdGyejoA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_fqeFwlbvdGyejoA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bzlErbuuhovEdpE subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bzlErbuuhovEdpE .L_small_initial_partial_block_bzlErbuuhovEdpE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bzlErbuuhovEdpE: orq %r8,%r8 je .L_after_reduction_bzlErbuuhovEdpE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bzlErbuuhovEdpE: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_5_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_cjnavuxfcgGzzCb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_cjnavuxfcgGzzCb .L_16_blocks_overflow_cjnavuxfcgGzzCb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_cjnavuxfcgGzzCb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 
vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_eaEmrzsvCBDlnpC subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_eaEmrzsvCBDlnpC .L_small_initial_partial_block_eaEmrzsvCBDlnpC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq 
%zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_eaEmrzsvCBDlnpC: orq %r8,%r8 je .L_after_reduction_eaEmrzsvCBDlnpC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_eaEmrzsvCBDlnpC: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_6_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_DndbknmyrzkriDg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_DndbknmyrzkriDg .L_16_blocks_overflow_DndbknmyrzkriDg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_DndbknmyrzkriDg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 
%zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_geoBBGllnatlCqq subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_geoBBGllnatlCqq .L_small_initial_partial_block_geoBBGllnatlCqq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_geoBBGllnatlCqq: orq %r8,%r8 je .L_after_reduction_geoBBGllnatlCqq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_geoBBGllnatlCqq: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_7_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_jtGaGqFaokuwcFo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_jtGaGqFaokuwcFo .L_16_blocks_overflow_jtGaGqFaokuwcFo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_jtGaGqFaokuwcFo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xkeqvjpCBEjlkGx subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xkeqvjpCBEjlkGx .L_small_initial_partial_block_xkeqvjpCBEjlkGx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 
98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xkeqvjpCBEjlkGx: orq %r8,%r8 je .L_after_reduction_xkeqvjpCBEjlkGx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xkeqvjpCBEjlkGx: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_8_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_BCegvazduGiwBqv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_BCegvazduGiwBqv .L_16_blocks_overflow_BCegvazduGiwBqv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_BCegvazduGiwBqv: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_fjDnDwdgfswBjwp subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_fjDnDwdgfswBjwp .L_small_initial_partial_block_fjDnDwdgfswBjwp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fjDnDwdgfswBjwp: orq %r8,%r8 je .L_after_reduction_fjDnDwdgfswBjwp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_fjDnDwdgfswBjwp: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_9_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_nGczFFdvDDdbdAl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_nGczFFdvDDdbdAl .L_16_blocks_overflow_nGczFFdvDDdbdAl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_nGczFFdvDDdbdAl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 
0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FEodvvDmqnsbxoz subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq 
%zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FEodvvDmqnsbxoz .L_small_initial_partial_block_FEodvvDmqnsbxoz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FEodvvDmqnsbxoz: orq %r8,%r8 je .L_after_reduction_FEodvvDmqnsbxoz vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FEodvvDmqnsbxoz: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_10_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_oulxbBotdhvdFbg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_oulxbBotdhvdFbg .L_16_blocks_overflow_oulxbBotdhvdFbg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_oulxbBotdhvdFbg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 
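# The long .byte runs in this region appear to be hand-encoded EVEX
# instructions (VPCLMULQDQ partial products for GHASH interleaved with
# VAES vaesenc/vaesenclast rounds on the counter blocks), emitted as raw
# bytes presumably so the file assembles even with assemblers that predate
# these extensions.  This reading is inferred from the surrounding operands
# and opcode bytes (0x44, 0xDC, 0xDD), not from a decoded listing.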
.byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mAhDuzfffzBcqnw subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mAhDuzfffzBcqnw .L_small_initial_partial_block_mAhDuzfffzBcqnw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mAhDuzfffzBcqnw: orq %r8,%r8 je .L_after_reduction_mAhDuzfffzBcqnw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mAhDuzfffzBcqnw: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_11_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_nCertFgkfoCxtun vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_nCertFgkfoCxtun .L_16_blocks_overflow_nCertFgkfoCxtun: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_nCertFgkfoCxtun: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 
98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dygAbwCGlokBzAu subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dygAbwCGlokBzAu .L_small_initial_partial_block_dygAbwCGlokBzAu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dygAbwCGlokBzAu: orq %r8,%r8 je .L_after_reduction_dygAbwCGlokBzAu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dygAbwCGlokBzAu: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_12_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_DtwkcFbdCfdcCrh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_DtwkcFbdCfdcCrh .L_16_blocks_overflow_DtwkcFbdCfdcCrh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_DtwkcFbdCfdcCrh: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq 
%zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_uelgeBErnEDceCF subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_uelgeBErnEDceCF .L_small_initial_partial_block_uelgeBErnEDceCF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uelgeBErnEDceCF: orq %r8,%r8 je .L_after_reduction_uelgeBErnEDceCF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_uelgeBErnEDceCF: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_13_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_ndumifgEEuiqDiF 
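# Tail path for a 13-block remainder: the cmpl/jae just above appears to
# check whether the low counter byte would wrap while producing the next
# blocks (the threshold tracks 256 minus the block count across these
# cases).  The fall-through below adds the precomputed increments held in
# %zmm27/%zmm28 directly; the overflow label instead byte-swaps the
# counter, adds ddq_add_1234/ddq_add_4444, and swaps back.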
vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_ndumifgEEuiqDiF .L_16_blocks_overflow_ndumifgEEuiqDiF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_ndumifgEEuiqDiF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb 
%zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DrrvrAjlkiwmAzx subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DrrvrAjlkiwmAzx .L_small_initial_partial_block_DrrvrAjlkiwmAzx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DrrvrAjlkiwmAzx: orq %r8,%r8 je .L_after_reduction_DrrvrAjlkiwmAzx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DrrvrAjlkiwmAzx: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_14_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_osDGzgifEhqjECm vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd 
%ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_osDGzgifEhqjECm .L_16_blocks_overflow_osDGzgifEhqjECm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_osDGzgifEhqjECm: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 
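# The vpshufb ...,%zmm29 shuffles just above appear to byte-reflect the
# data blocks loaded earlier in this path so they can be folded into the
# GHASH accumulator that follows; vpternlogq with immediate 0x96 computes
# a three-way XOR and is used throughout this file to merge partial GHASH
# products in a single instruction.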
vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_fwjCFubGdkywpFz subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_fwjCFubGdkywpFz .L_small_initial_partial_block_fwjCFubGdkywpFz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fwjCFubGdkywpFz: orq %r8,%r8 je .L_after_reduction_fwjCFubGdkywpFz vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_fwjCFubGdkywpFz: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_15_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae 
.L_16_blocks_overflow_tiCBFudBnEgekda vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_tiCBFudBnEgekda .L_16_blocks_overflow_tiCBFudBnEgekda: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_tiCBFudBnEgekda: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 
%zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_sbduutzwEklCDpB subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_sbduutzwEklCDpB .L_small_initial_partial_block_sbduutzwEklCDpB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_sbduutzwEklCDpB: orq %r8,%r8 je .L_after_reduction_sbduutzwEklCDpB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_sbduutzwEklCDpB: jmp 
.L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_16_kpvFtqCzpagsbmy: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_ennneCoBjzBsijF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ennneCoBjzBsijF .L_16_blocks_overflow_ennneCoBjzBsijF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ennneCoBjzBsijF: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 
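# Final stores for the 16-block tail case follow: the first three 64-byte
# lanes are written unmasked and the last lane uses the %k1 byte mask built
# from byte64_len_to_mask_table earlier in this path, which appears to keep
# the store within the remaining message length; the vextracti32x4 copies
# seem to preserve the last block for the partial-block bookkeeping further
# down.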
vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_qbevliloqkkkFsD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qbevliloqkkkFsD: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qbevliloqkkkFsD: jmp .L_last_blocks_done_kpvFtqCzpagsbmy .L_last_num_blocks_is_0_kpvFtqCzpagsbmy: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq 
%xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_kpvFtqCzpagsbmy: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_icBhFhCkojGgnBc .L_message_below_equal_16_blocks_icBhFhCkojGgnBc: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_fAioGdenAmmvupb jl .L_small_initial_num_blocks_is_7_1_fAioGdenAmmvupb cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_fAioGdenAmmvupb jl .L_small_initial_num_blocks_is_11_9_fAioGdenAmmvupb cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_fAioGdenAmmvupb cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_fAioGdenAmmvupb cmpq $14,%r12 je .L_small_initial_num_blocks_is_14_fAioGdenAmmvupb jmp .L_small_initial_num_blocks_is_13_fAioGdenAmmvupb .L_small_initial_num_blocks_is_11_9_fAioGdenAmmvupb: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_fAioGdenAmmvupb cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_fAioGdenAmmvupb jmp .L_small_initial_num_blocks_is_9_fAioGdenAmmvupb .L_small_initial_num_blocks_is_7_1_fAioGdenAmmvupb: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_fAioGdenAmmvupb jl .L_small_initial_num_blocks_is_3_1_fAioGdenAmmvupb cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_fAioGdenAmmvupb cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_fAioGdenAmmvupb jmp .L_small_initial_num_blocks_is_5_fAioGdenAmmvupb .L_small_initial_num_blocks_is_3_1_fAioGdenAmmvupb: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_fAioGdenAmmvupb cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_fAioGdenAmmvupb .L_small_initial_num_blocks_is_1_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_adigDqnunatgwqg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 
vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_adigDqnunatgwqg .L_small_initial_partial_block_adigDqnunatgwqg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_adigDqnunatgwqg .L_small_initial_compute_done_adigDqnunatgwqg: .L_after_reduction_adigDqnunatgwqg: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_2_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wijsfgBfoycrhbf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wijsfgBfoycrhbf .L_small_initial_partial_block_wijsfgBfoycrhbf: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wijsfgBfoycrhbf: orq %r8,%r8 je .L_after_reduction_wijsfgBfoycrhbf vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_wijsfgBfoycrhbf: jmp 
.L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_3_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ibqkzvjmvrGthss subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ibqkzvjmvrGthss .L_small_initial_partial_block_ibqkzvjmvrGthss: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ibqkzvjmvrGthss: orq %r8,%r8 je .L_after_reduction_ibqkzvjmvrGthss vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ibqkzvjmvrGthss: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_4_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb 
%zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xlbdECcsDitBbrC subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xlbdECcsDitBbrC .L_small_initial_partial_block_xlbdECcsDitBbrC: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xlbdECcsDitBbrC: orq %r8,%r8 je .L_after_reduction_xlbdECcsDitBbrC vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_xlbdECcsDitBbrC: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_5_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AvBuAcGaAAhviww subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AvBuAcGaAAhviww .L_small_initial_partial_block_AvBuAcGaAAhviww: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AvBuAcGaAAhviww: orq %r8,%r8 je .L_after_reduction_AvBuAcGaAAhviww vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_AvBuAcGaAAhviww: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_6_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq 
byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dmnwagjDbfGuxqa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dmnwagjDbfGuxqa .L_small_initial_partial_block_dmnwagjDbfGuxqa: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 
98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dmnwagjDbfGuxqa: orq %r8,%r8 je .L_after_reduction_dmnwagjDbfGuxqa vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_dmnwagjDbfGuxqa: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_7_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FFvlakmlCAfckcF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FFvlakmlCAfckcF .L_small_initial_partial_block_FFvlakmlCAfckcF: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 
.byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FFvlakmlCAfckcF: orq %r8,%r8 je .L_after_reduction_FFvlakmlCAfckcF vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_FFvlakmlCAfckcF: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_8_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_brxnjeBcvFoBFjp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 
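/* Horizontal fold: the four 128-bit lanes of the GHASH partial products are XORed down to %xmm0 (above) and %xmm3 (next); the POLY2 constants that follow reduce the combined 256-bit product modulo the GCM polynomial into the tag accumulator %xmm14. */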
vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_brxnjeBcvFoBFjp .L_small_initial_partial_block_brxnjeBcvFoBFjp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_brxnjeBcvFoBFjp: orq %r8,%r8 je .L_after_reduction_brxnjeBcvFoBFjp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_brxnjeBcvFoBFjp: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_9_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq 
%zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ruEgsxDerxegpsB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ruEgsxDerxegpsB .L_small_initial_partial_block_ruEgsxDerxegpsB: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ruEgsxDerxegpsB: orq %r8,%r8 je .L_after_reduction_ruEgsxDerxegpsB vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ruEgsxDerxegpsB: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_10_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 
0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DtsnEBEgqapGgkD subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DtsnEBEgqapGgkD .L_small_initial_partial_block_DtsnEBEgqapGgkD: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 
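/* The .byte runs beginning with 98 (0x62, the EVEX prefix) are hand-encoded VPCLMULQDQ instructions, most likely emitted as raw bytes so the file assembles without VPCLMULQDQ/VAES assembler support; they multiply the reflected blocks by precomputed powers of the hash key loaded from (%r10). The XORs below gather the high, low, and cross partial products before reduction. */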
vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DtsnEBEgqapGgkD: orq %r8,%r8 je .L_after_reduction_DtsnEBEgqapGgkD vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_DtsnEBEgqapGgkD: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_11_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FAlijzFrzEsACFt subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 
98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FAlijzFrzEsACFt .L_small_initial_partial_block_FAlijzFrzEsACFt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FAlijzFrzEsACFt: orq %r8,%r8 je .L_after_reduction_FAlijzFrzEsACFt vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_FAlijzFrzEsACFt: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_12_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 
98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xdDFiiniApojwBg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xdDFiiniApojwBg .L_small_initial_partial_block_xdDFiiniApojwBg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 
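/* Each block-count handler finishes with the same sequence: merge the cross terms (vpsrldq/vpslldq by 8), fold the lanes horizontally, and reduce with POLY2 into %xmm14; the handlers differ chiefly in how many blocks they process and which hash-key powers they use. */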
vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xdDFiiniApojwBg: orq %r8,%r8 je .L_after_reduction_xdDFiiniApojwBg vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_xdDFiiniApojwBg: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_13_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl 
.L_small_initial_partial_block_nwkAjutBGaaatpl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nwkAjutBGaaatpl .L_small_initial_partial_block_nwkAjutBGaaatpl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nwkAjutBGaaatpl: orq %r8,%r8 je .L_after_reduction_nwkAjutBGaaatpl vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_nwkAjutBGaaatpl: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_14_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 
0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ffynzdqrsbdreFk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
.byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ffynzdqrsbdreFk .L_small_initial_partial_block_ffynzdqrsbdreFk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ffynzdqrsbdreFk: orq %r8,%r8 je .L_after_reduction_ffynzdqrsbdreFk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ffynzdqrsbdreFk: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_15_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 
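// Note (editorial): the raw .byte sequences in this routine appear to be EVEX-encoded
// VAES/VPCLMULQDQ instructions (vaesenc/vaesenclast and vpclmulqdq) emitted as literal
// bytes; the surrounding vbroadcastf64x2 loads fetch AES round keys from the key
// schedule at %rdi for the counter blocks held in %zmm0/%zmm3/%zmm4/%zmm5.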
vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hbmAeclAGCyurof subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hbmAeclAGCyurof .L_small_initial_partial_block_hbmAeclAGCyurof: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq 
%zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hbmAeclAGCyurof: orq %r8,%r8 je .L_after_reduction_hbmAeclAGCyurof vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_hbmAeclAGCyurof: jmp .L_small_initial_blocks_encrypted_fAioGdenAmmvupb .L_small_initial_num_blocks_is_16_fAioGdenAmmvupb: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 
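// 16-block tail of the small-input path: the code below folds the last ciphertext
// blocks into GHASH using the hash-key powers stored at 80(%rsi), reduces with the
// POLY2 constant into %xmm14, and then stores the updated counter block to 0(%rsi)
// and the byte-swapped GHASH state to 64(%rsi).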
.L_small_initial_partial_block_fvqkmnelfBwdflt: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fvqkmnelfBwdflt: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_fvqkmnelfBwdflt: .L_small_initial_blocks_encrypted_fAioGdenAmmvupb: .L_ghash_done_icBhFhCkojGgnBc: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_icBhFhCkojGgnBc: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_icBhFhCkojGgnBc: jmp .Lexit_gcm_decrypt .align 32 .Laes_gcm_decrypt_192_avx512: orq %r8,%r8 je .L_enc_dec_abort_efvnrtvwAsfehEC xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_Fvzomuuccfdfevt movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_Fvzomuuccfdfevt subq %r13,%r12 .L_no_extra_mask_Fvzomuuccfdfevt: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_Fvzomuuccfdfevt .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_Fvzomuuccfdfevt .L_partial_incomplete_Fvzomuuccfdfevt: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_Fvzomuuccfdfevt: leaq 
byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_Fvzomuuccfdfevt: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_efvnrtvwAsfehEC cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_efvnrtvwAsfehEC vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_bmCGDqjpElhfFfq vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_bmCGDqjpElhfFfq .L_next_16_overflow_bmCGDqjpElhfFfq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_bmCGDqjpElhfFfq: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz 
.L_skip_hkeys_precomputation_mbihlziFEFsDoGE vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_mbihlziFEFsDoGE: cmpq $512,%r8 jb .L_message_below_32_blocks_efvnrtvwAsfehEC cmpb $240,%r15b jae .L_next_16_overflow_lakxgokamypkjgE vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_lakxgokamypkjgE .L_next_16_overflow_lakxgokamypkjgE: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_lakxgokamypkjgE: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_jpElfyvBextCmie vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 
576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq 
$0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_jpElfyvBextCmie: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_efvnrtvwAsfehEC .L_encrypt_big_nblocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae .L_16_blocks_overflow_AlopGldBavsssnG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_AlopGldBavsssnG .L_16_blocks_overflow_AlopGldBavsssnG: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_AlopGldBavsssnG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_gaBCwkclDxgqitC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_gaBCwkclDxgqitC .L_16_blocks_overflow_gaBCwkclDxgqitC: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_gaBCwkclDxgqitC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_Fxngerofutwuigg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_Fxngerofutwuigg .L_16_blocks_overflow_Fxngerofutwuigg: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_Fxngerofutwuigg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq 
$0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_efvnrtvwAsfehEC .L_no_more_big_nblocks_efvnrtvwAsfehEC: cmpq $512,%r8 jae .L_encrypt_32_blocks_efvnrtvwAsfehEC cmpq $256,%r8 jae .L_encrypt_16_blocks_efvnrtvwAsfehEC .L_encrypt_0_blocks_ghash_32_efvnrtvwAsfehEC: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 
896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_xtiyCEhGGvgkorn cmpl $8,%r10d je .L_last_num_blocks_is_8_xtiyCEhGGvgkorn jb .L_last_num_blocks_is_7_1_xtiyCEhGGvgkorn cmpl $12,%r10d je .L_last_num_blocks_is_12_xtiyCEhGGvgkorn jb .L_last_num_blocks_is_11_9_xtiyCEhGGvgkorn cmpl $15,%r10d je .L_last_num_blocks_is_15_xtiyCEhGGvgkorn ja .L_last_num_blocks_is_16_xtiyCEhGGvgkorn cmpl $14,%r10d je .L_last_num_blocks_is_14_xtiyCEhGGvgkorn jmp .L_last_num_blocks_is_13_xtiyCEhGGvgkorn .L_last_num_blocks_is_11_9_xtiyCEhGGvgkorn: cmpl $10,%r10d je .L_last_num_blocks_is_10_xtiyCEhGGvgkorn ja .L_last_num_blocks_is_11_xtiyCEhGGvgkorn jmp .L_last_num_blocks_is_9_xtiyCEhGGvgkorn .L_last_num_blocks_is_7_1_xtiyCEhGGvgkorn: cmpl $4,%r10d je .L_last_num_blocks_is_4_xtiyCEhGGvgkorn jb .L_last_num_blocks_is_3_1_xtiyCEhGGvgkorn cmpl $6,%r10d ja .L_last_num_blocks_is_7_xtiyCEhGGvgkorn je .L_last_num_blocks_is_6_xtiyCEhGGvgkorn jmp .L_last_num_blocks_is_5_xtiyCEhGGvgkorn .L_last_num_blocks_is_3_1_xtiyCEhGGvgkorn: cmpl $2,%r10d ja .L_last_num_blocks_is_3_xtiyCEhGGvgkorn je .L_last_num_blocks_is_2_xtiyCEhGGvgkorn .L_last_num_blocks_is_1_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_nlADBBgdbvxiiEb vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_nlADBBgdbvxiiEb .L_16_blocks_overflow_nlADBBgdbvxiiEb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_nlADBBgdbvxiiEb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq 
$0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_tqujgvqggqpCibu subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tqujgvqggqpCibu .L_small_initial_partial_block_tqujgvqggqpCibu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_tqujgvqggqpCibu .L_small_initial_compute_done_tqujgvqggqpCibu: .L_after_reduction_tqujgvqggqpCibu: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_2_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_uvnjGlBDyvrfirm vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_uvnjGlBDyvrfirm .L_16_blocks_overflow_uvnjGlBDyvrfirm: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_uvnjGlBDyvrfirm: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 
.byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wDeAjiDoocmqspC subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wDeAjiDoocmqspC .L_small_initial_partial_block_wDeAjiDoocmqspC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wDeAjiDoocmqspC: orq %r8,%r8 je .L_after_reduction_wDeAjiDoocmqspC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wDeAjiDoocmqspC: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_3_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae 
.L_16_blocks_overflow_FgovsDdCfEGrkbF vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_FgovsDdCfEGrkbF .L_16_blocks_overflow_FgovsDdCfEGrkbF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_FgovsDdCfEGrkbF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vofcBkyofakpciE subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vofcBkyofakpciE .L_small_initial_partial_block_vofcBkyofakpciE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 
98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vofcBkyofakpciE: orq %r8,%r8 je .L_after_reduction_vofcBkyofakpciE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vofcBkyofakpciE: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_4_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_DlimwiDzackronx vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_DlimwiDzackronx .L_16_blocks_overflow_DlimwiDzackronx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_DlimwiDzackronx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_umExCeAmGaBqmig subq $16,%r8 movl $0,(%rdx) vmovdqu64 
192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_umExCeAmGaBqmig .L_small_initial_partial_block_umExCeAmGaBqmig: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_umExCeAmGaBqmig: orq %r8,%r8 je .L_after_reduction_umExCeAmGaBqmig vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_umExCeAmGaBqmig: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_5_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_qGrgsssqhFxDdtg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_qGrgsssqhFxDdtg .L_16_blocks_overflow_qGrgsssqhFxDdtg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_qGrgsssqhFxDdtg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 
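# Round key 5 onward: the remaining AES rounds stay interleaved with GHASH partial-product accumulation (vpternlogq below).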
vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DkwztEgqyefkjcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DkwztEgqyefkjcA .L_small_initial_partial_block_DkwztEgqyefkjcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DkwztEgqyefkjcA: orq %r8,%r8 je .L_after_reduction_DkwztEgqyefkjcA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DkwztEgqyefkjcA: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_6_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_ufEGEnqpAFAEymx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_ufEGEnqpAFAEymx .L_16_blocks_overflow_ufEGEnqpAFAEymx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_ufEGEnqpAFAEymx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AjqanfyCsBedpsg subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 
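# Carry-less multiplies (vpclmulqdq, EVEX-encoded via .byte) fold these byte-reflected blocks into the GHASH partial products.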
.byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AjqanfyCsBedpsg .L_small_initial_partial_block_AjqanfyCsBedpsg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AjqanfyCsBedpsg: orq %r8,%r8 je .L_after_reduction_AjqanfyCsBedpsg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AjqanfyCsBedpsg: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_7_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_xgpGrqoEEApwzGE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_xgpGrqoEEApwzGE .L_16_blocks_overflow_xgpGrqoEEApwzGE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_xgpGrqoEEApwzGE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 
192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lrumjmlatrsmlag subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lrumjmlatrsmlag .L_small_initial_partial_block_lrumjmlatrsmlag: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 
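# vpternlogq 0x96 = three-way XOR: accumulate the high/low GHASH partial products ahead of the polynomial reduction.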
vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lrumjmlatrsmlag: orq %r8,%r8 je .L_after_reduction_lrumjmlatrsmlag vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lrumjmlatrsmlag: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_8_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_DBafwcnsvcxAbsv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_DBafwcnsvcxAbsv .L_16_blocks_overflow_DBafwcnsvcxAbsv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_DBafwcnsvcxAbsv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vptqcrjpEiCjEDi subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vptqcrjpEiCjEDi .L_small_initial_partial_block_vptqcrjpEiCjEDi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vptqcrjpEiCjEDi: orq %r8,%r8 je .L_after_reduction_vptqcrjpEiCjEDi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vptqcrjpEiCjEDi: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_9_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_muonozkGretEzbg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_muonozkGretEzbg .L_16_blocks_overflow_muonozkGretEzbg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 
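# Counter-overflow path done: counter blocks were incremented in byte-swapped form and shuffled back before use.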
.L_16_blocks_ok_muonozkGretEzbg: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tkpprjhbsieissq subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq 
%zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tkpprjhbsieissq .L_small_initial_partial_block_tkpprjhbsieissq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tkpprjhbsieissq: orq %r8,%r8 je .L_after_reduction_tkpprjhbsieissq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tkpprjhbsieissq: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_10_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_tcxAtedExcFvxwb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_tcxAtedExcFvxwb .L_16_blocks_overflow_tcxAtedExcFvxwb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_tcxAtedExcFvxwb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 
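# 10-block tail: carry-less multiplies (GHASH) on data loaded from the stack continue between the AES rounds.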
.byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jcddyvvAxCAjvqC subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jcddyvvAxCAjvqC .L_small_initial_partial_block_jcddyvvAxCAjvqC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jcddyvvAxCAjvqC: orq %r8,%r8 je .L_after_reduction_jcddyvvAxCAjvqC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jcddyvvAxCAjvqC: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_11_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_oCyoemhjBbobeot vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_oCyoemhjBbobeot .L_16_blocks_overflow_oCyoemhjBbobeot: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_oCyoemhjBbobeot: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DusiGqzupzswzGi subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DusiGqzupzswzGi .L_small_initial_partial_block_DusiGqzupzswzGi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 
98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DusiGqzupzswzGi: orq %r8,%r8 je .L_after_reduction_DusiGqzupzswzGi vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DusiGqzupzswzGi: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_12_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_rechbAAmkFuppsn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_rechbAAmkFuppsn .L_16_blocks_overflow_rechbAAmkFuppsn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_rechbAAmkFuppsn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lgDrfakaDoGugoh subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lgDrfakaDoGugoh .L_small_initial_partial_block_lgDrfakaDoGugoh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 
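# Remaining carry-less multiplies with the table entries loaded from 208/240(%r10), ahead of the 128-bit GHASH reduction.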
.byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lgDrfakaDoGugoh: orq %r8,%r8 je .L_after_reduction_lgDrfakaDoGugoh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lgDrfakaDoGugoh: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_13_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_llFkwrFhuxfvsGD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_llFkwrFhuxfvsGD .L_16_blocks_overflow_llFkwrFhuxfvsGD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_llFkwrFhuxfvsGD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qwqiEdfkpnfpFcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qwqiEdfkpnfpFcA .L_small_initial_partial_block_qwqiEdfkpnfpFcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 
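# Further vpclmulqdq partial products for the 13-block tail's GHASH update.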
.byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qwqiEdfkpnfpFcA: orq %r8,%r8 je .L_after_reduction_qwqiEdfkpnfpFcA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qwqiEdfkpnfpFcA: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_14_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_euGgDuqlvgCFoFG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_euGgDuqlvgCFoFG .L_16_blocks_overflow_euGgDuqlvgCFoFG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_euGgDuqlvgCFoFG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 
192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jAmrCFqArnxiBwr subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jAmrCFqArnxiBwr .L_small_initial_partial_block_jAmrCFqArnxiBwr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 
48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jAmrCFqArnxiBwr: orq %r8,%r8 je .L_after_reduction_jAmrCFqArnxiBwr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jAmrCFqArnxiBwr: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_15_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_pFsoEbjdpyaFdzt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_pFsoEbjdpyaFdzt .L_16_blocks_overflow_pFsoEbjdpyaFdzt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_pFsoEbjdpyaFdzt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ynvaqdiwqpExsAh subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ynvaqdiwqpExsAh .L_small_initial_partial_block_ynvaqdiwqpExsAh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ynvaqdiwqpExsAh: orq %r8,%r8 je .L_after_reduction_ynvaqdiwqpExsAh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ynvaqdiwqpExsAh: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_16_xtiyCEhGGvgkorn: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_lxzkkenajCqycbF vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_lxzkkenajCqycbF .L_16_blocks_overflow_lxzkkenajCqycbF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_lxzkkenajCqycbF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_rerkgBbyampldto: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 
98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rerkgBbyampldto: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rerkgBbyampldto: jmp .L_last_blocks_done_xtiyCEhGGvgkorn .L_last_num_blocks_is_0_xtiyCEhGGvgkorn: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_xtiyCEhGGvgkorn: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_efvnrtvwAsfehEC .L_encrypt_32_blocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae .L_16_blocks_overflow_kzaebDdDwylbAcu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_kzaebDdDwylbAcu .L_16_blocks_overflow_kzaebDdDwylbAcu: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_kzaebDdDwylbAcu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 
vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_hfufmxvqjkdtxiG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_hfufmxvqjkdtxiG .L_16_blocks_overflow_hfufmxvqjkdtxiG: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_hfufmxvqjkdtxiG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 
%zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_nGrjyBwfEzyFAvA cmpl $8,%r10d je .L_last_num_blocks_is_8_nGrjyBwfEzyFAvA jb .L_last_num_blocks_is_7_1_nGrjyBwfEzyFAvA cmpl $12,%r10d je .L_last_num_blocks_is_12_nGrjyBwfEzyFAvA jb .L_last_num_blocks_is_11_9_nGrjyBwfEzyFAvA cmpl $15,%r10d je .L_last_num_blocks_is_15_nGrjyBwfEzyFAvA ja .L_last_num_blocks_is_16_nGrjyBwfEzyFAvA cmpl $14,%r10d je .L_last_num_blocks_is_14_nGrjyBwfEzyFAvA jmp .L_last_num_blocks_is_13_nGrjyBwfEzyFAvA .L_last_num_blocks_is_11_9_nGrjyBwfEzyFAvA: cmpl $10,%r10d je .L_last_num_blocks_is_10_nGrjyBwfEzyFAvA ja .L_last_num_blocks_is_11_nGrjyBwfEzyFAvA jmp .L_last_num_blocks_is_9_nGrjyBwfEzyFAvA .L_last_num_blocks_is_7_1_nGrjyBwfEzyFAvA: cmpl $4,%r10d je .L_last_num_blocks_is_4_nGrjyBwfEzyFAvA jb .L_last_num_blocks_is_3_1_nGrjyBwfEzyFAvA cmpl $6,%r10d ja .L_last_num_blocks_is_7_nGrjyBwfEzyFAvA je .L_last_num_blocks_is_6_nGrjyBwfEzyFAvA jmp .L_last_num_blocks_is_5_nGrjyBwfEzyFAvA .L_last_num_blocks_is_3_1_nGrjyBwfEzyFAvA: cmpl $2,%r10d ja .L_last_num_blocks_is_3_nGrjyBwfEzyFAvA je .L_last_num_blocks_is_2_nGrjyBwfEzyFAvA .L_last_num_blocks_is_1_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_mklqBGCbyBjeEom vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_mklqBGCbyBjeEom .L_16_blocks_overflow_mklqBGCbyBjeEom: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_mklqBGCbyBjeEom: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_AljAsgffjDBAEDB subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AljAsgffjDBAEDB .L_small_initial_partial_block_AljAsgffjDBAEDB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_AljAsgffjDBAEDB .L_small_initial_compute_done_AljAsgffjDBAEDB: .L_after_reduction_AljAsgffjDBAEDB: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA 
.L_last_num_blocks_is_2_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_ADzEaGzEEnztayt vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_ADzEaGzEEnztayt .L_16_blocks_overflow_ADzEaGzEEnztayt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_ADzEaGzEEnztayt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_awmEjFhlgwizrsw subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_awmEjFhlgwizrsw .L_small_initial_partial_block_awmEjFhlgwizrsw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 
240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_awmEjFhlgwizrsw: orq %r8,%r8 je .L_after_reduction_awmEjFhlgwizrsw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_awmEjFhlgwizrsw: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_3_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_lcaBxDbeGChbeFD vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_lcaBxDbeGChbeFD .L_16_blocks_overflow_lcaBxDbeGChbeFD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_lcaBxDbeGChbeFD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mBDBtxmxpwzmxwj 
subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mBDBtxmxpwzmxwj .L_small_initial_partial_block_mBDBtxmxpwzmxwj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mBDBtxmxpwzmxwj: orq %r8,%r8 je .L_after_reduction_mBDBtxmxpwzmxwj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mBDBtxmxpwzmxwj: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_4_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_pawpbdkivckxDwC vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_pawpbdkivckxDwC .L_16_blocks_overflow_pawpbdkivckxDwC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_pawpbdkivckxDwC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 
98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wubpbikcrdlgswu subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wubpbikcrdlgswu .L_small_initial_partial_block_wubpbikcrdlgswu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wubpbikcrdlgswu: orq %r8,%r8 je .L_after_reduction_wubpbikcrdlgswu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wubpbikcrdlgswu: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_5_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_DaxgvFmGcDpdBDr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_DaxgvFmGcDpdBDr .L_16_blocks_overflow_DaxgvFmGcDpdBDr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_DaxgvFmGcDpdBDr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wfpxmlzpEjGxgfg subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq 
$8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wfpxmlzpEjGxgfg .L_small_initial_partial_block_wfpxmlzpEjGxgfg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wfpxmlzpEjGxgfg: orq %r8,%r8 je .L_after_reduction_wfpxmlzpEjGxgfg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wfpxmlzpEjGxgfg: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_6_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_GCBuEfGizfDEkbf vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_GCBuEfGizfDEkbf .L_16_blocks_overflow_GCBuEfGizfDEkbf: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_GCBuEfGizfDEkbf: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_bsGacaiacduekkh subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_bsGacaiacduekkh .L_small_initial_partial_block_bsGacaiacduekkh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_bsGacaiacduekkh: orq %r8,%r8 je .L_after_reduction_bsGacaiacduekkh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_bsGacaiacduekkh: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_7_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_sxxwCglaApctqvC 
vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_sxxwCglaApctqvC .L_16_blocks_overflow_sxxwCglaApctqvC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_sxxwCglaApctqvC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yiCblticDBdDvqz subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yiCblticDBdDvqz .L_small_initial_partial_block_yiCblticDBdDvqz: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yiCblticDBdDvqz: orq %r8,%r8 je .L_after_reduction_yiCblticDBdDvqz vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yiCblticDBdDvqz: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_8_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_CnnuddjEBnFGdsj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_CnnuddjEBnFGdsj .L_16_blocks_overflow_CnnuddjEBnFGdsj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_CnnuddjEBnFGdsj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 
vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ozBrEzEFaraubuw subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ozBrEzEFaraubuw .L_small_initial_partial_block_ozBrEzEFaraubuw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ozBrEzEFaraubuw: orq %r8,%r8 je .L_after_reduction_ozBrEzEFaraubuw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ozBrEzEFaraubuw: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_9_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_jwawBbqsGrnbEEd vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_jwawBbqsGrnbEEd .L_16_blocks_overflow_jwawBbqsGrnbEEd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_jwawBbqsGrnbEEd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 
.byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FtmdhyAthqlklcF subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FtmdhyAthqlklcF .L_small_initial_partial_block_FtmdhyAthqlklcF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FtmdhyAthqlklcF: orq %r8,%r8 je .L_after_reduction_FtmdhyAthqlklcF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FtmdhyAthqlklcF: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_10_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_bEhtipvqjwytqAA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp 
.L_16_blocks_ok_bEhtipvqjwytqAA .L_16_blocks_overflow_bEhtipvqjwytqAA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_bEhtipvqjwytqAA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dGfczcdzdkvubwf subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 
98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dGfczcdzdkvubwf .L_small_initial_partial_block_dGfczcdzdkvubwf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dGfczcdzdkvubwf: orq %r8,%r8 je .L_after_reduction_dGfczcdzdkvubwf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dGfczcdzdkvubwf: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_11_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_peywgEttBymhlkG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_peywgEttBymhlkG .L_16_blocks_overflow_peywgEttBymhlkG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_peywgEttBymhlkG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 
16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kGoguFjBsnAyegA subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 
vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kGoguFjBsnAyegA .L_small_initial_partial_block_kGoguFjBsnAyegA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kGoguFjBsnAyegA: orq %r8,%r8 je .L_after_reduction_kGoguFjBsnAyegA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kGoguFjBsnAyegA: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_12_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_pfftEtegsrsinbs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_pfftEtegsrsinbs .L_16_blocks_overflow_pfftEtegsrsinbs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_pfftEtegsrsinbs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 
48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_epFvAomFdDAhsfr subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 
98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_epFvAomFdDAhsfr .L_small_initial_partial_block_epFvAomFdDAhsfr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_epFvAomFdDAhsfr: orq %r8,%r8 je .L_after_reduction_epFvAomFdDAhsfr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_epFvAomFdDAhsfr: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_13_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_hoEpuvlFtAdDDCj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_hoEpuvlFtAdDDCj .L_16_blocks_overflow_hoEpuvlFtAdDDCj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_hoEpuvlFtAdDDCj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lwaCfdsabqxsDae subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 
$1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lwaCfdsabqxsDae .L_small_initial_partial_block_lwaCfdsabqxsDae: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lwaCfdsabqxsDae: orq %r8,%r8 je .L_after_reduction_lwaCfdsabqxsDae vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_lwaCfdsabqxsDae: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_14_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_kDibsGzbehdlyln vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_kDibsGzbehdlyln .L_16_blocks_overflow_kDibsGzbehdlyln: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_kDibsGzbehdlyln: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 
192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aygEgEvDgGbktBd subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 
vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aygEgEvDgGbktBd .L_small_initial_partial_block_aygEgEvDgGbktBd: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aygEgEvDgGbktBd: orq %r8,%r8 je .L_after_reduction_aygEgEvDgGbktBd vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aygEgEvDgGbktBd: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_15_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_vejCgbGykbnkAnl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_vejCgbGykbnkAnl .L_16_blocks_overflow_vejCgbGykbnkAnl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_vejCgbGykbnkAnl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 
98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AeaGuuDepzdAfkw subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 
vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AeaGuuDepzdAfkw .L_small_initial_partial_block_AeaGuuDepzdAfkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AeaGuuDepzdAfkw: orq %r8,%r8 je .L_after_reduction_AeaGuuDepzdAfkw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AeaGuuDepzdAfkw: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_16_nGrjyBwfEzyFAvA: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_oEmrkvwdwsmBgef vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_oEmrkvwdwsmBgef .L_16_blocks_overflow_oEmrkvwdwsmBgef: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 
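/* Full 16-block tail: AES-encrypt the counter blocks with the round keys at
   (%rdi) while, interleaved, multiplying the previously buffered GHASH input
   (staged from 768(%rsp)) by the stashed hash-key powers indexed via %rbx. */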
.L_16_blocks_ok_oEmrkvwdwsmBgef: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 
.L_small_initial_partial_block_emEtFnwcsvbsGee: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_emEtFnwcsvbsGee: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_emEtFnwcsvbsGee: jmp .L_last_blocks_done_nGrjyBwfEzyFAvA .L_last_num_blocks_is_0_nGrjyBwfEzyFAvA: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_nGrjyBwfEzyFAvA: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_efvnrtvwAsfehEC .L_encrypt_16_blocks_efvnrtvwAsfehEC: cmpb $240,%r15b jae .L_16_blocks_overflow_evgrutpeAjmaukd vpaddd %zmm28,%zmm2,%zmm0 vpaddd 
%zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_evgrutpeAjmaukd .L_16_blocks_overflow_evgrutpeAjmaukd: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_evgrutpeAjmaukd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_FBaFgdErDhzlksr cmpl $8,%r10d je .L_last_num_blocks_is_8_FBaFgdErDhzlksr jb .L_last_num_blocks_is_7_1_FBaFgdErDhzlksr cmpl $12,%r10d je .L_last_num_blocks_is_12_FBaFgdErDhzlksr jb .L_last_num_blocks_is_11_9_FBaFgdErDhzlksr cmpl $15,%r10d je .L_last_num_blocks_is_15_FBaFgdErDhzlksr ja .L_last_num_blocks_is_16_FBaFgdErDhzlksr cmpl $14,%r10d je .L_last_num_blocks_is_14_FBaFgdErDhzlksr jmp .L_last_num_blocks_is_13_FBaFgdErDhzlksr .L_last_num_blocks_is_11_9_FBaFgdErDhzlksr: cmpl $10,%r10d je .L_last_num_blocks_is_10_FBaFgdErDhzlksr ja .L_last_num_blocks_is_11_FBaFgdErDhzlksr jmp .L_last_num_blocks_is_9_FBaFgdErDhzlksr .L_last_num_blocks_is_7_1_FBaFgdErDhzlksr: cmpl $4,%r10d je .L_last_num_blocks_is_4_FBaFgdErDhzlksr jb .L_last_num_blocks_is_3_1_FBaFgdErDhzlksr cmpl $6,%r10d ja .L_last_num_blocks_is_7_FBaFgdErDhzlksr je .L_last_num_blocks_is_6_FBaFgdErDhzlksr jmp .L_last_num_blocks_is_5_FBaFgdErDhzlksr .L_last_num_blocks_is_3_1_FBaFgdErDhzlksr: cmpl $2,%r10d ja .L_last_num_blocks_is_3_FBaFgdErDhzlksr je .L_last_num_blocks_is_2_FBaFgdErDhzlksr .L_last_num_blocks_is_1_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_EztzACczExrozqe vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_EztzACczExrozqe .L_16_blocks_overflow_EztzACczExrozqe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_EztzACczExrozqe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 
98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_CCCssCzirDpGCgu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CCCssCzirDpGCgu .L_small_initial_partial_block_CCCssCzirDpGCgu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_CCCssCzirDpGCgu .L_small_initial_compute_done_CCCssCzirDpGCgu: .L_after_reduction_CCCssCzirDpGCgu: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_2_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_ddpheeylmysesqA vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_ddpheeylmysesqA .L_16_blocks_overflow_ddpheeylmysesqA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_ddpheeylmysesqA: vbroadcastf64x2 
0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kEwhkniEotxddri subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_kEwhkniEotxddri .L_small_initial_partial_block_kEwhkniEotxddri: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kEwhkniEotxddri: orq %r8,%r8 je .L_after_reduction_kEwhkniEotxddri vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kEwhkniEotxddri: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_3_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_vAzgdsEEohhszlv vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_vAzgdsEEohhszlv .L_16_blocks_overflow_vAzgdsEEohhszlv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_vAzgdsEEohhszlv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq 
%xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vgpvCquElabkfFm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vgpvCquElabkfFm .L_small_initial_partial_block_vgpvCquElabkfFm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vgpvCquElabkfFm: orq %r8,%r8 je .L_after_reduction_vgpvCquElabkfFm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vgpvCquElabkfFm: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_4_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_ciiDnbwsdfFhyEA vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_ciiDnbwsdfFhyEA .L_16_blocks_overflow_ciiDnbwsdfFhyEA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_ciiDnbwsdfFhyEA: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 
.byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BmnofkldoqxnfuE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BmnofkldoqxnfuE .L_small_initial_partial_block_BmnofkldoqxnfuE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BmnofkldoqxnfuE: orq %r8,%r8 je .L_after_reduction_BmnofkldoqxnfuE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BmnofkldoqxnfuE: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_5_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_AGvFmhBetCxAviv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_AGvFmhBetCxAviv .L_16_blocks_overflow_AGvFmhBetCxAviv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_AGvFmhBetCxAviv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tnjvAdygufmEFFh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tnjvAdygufmEFFh .L_small_initial_partial_block_tnjvAdygufmEFFh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tnjvAdygufmEFFh: orq %r8,%r8 je .L_after_reduction_tnjvAdygufmEFFh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tnjvAdygufmEFFh: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_6_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_sjympigbCCDhsDn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_sjympigbCCDhsDn .L_16_blocks_overflow_sjympigbCCDhsDn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_sjympigbCCDhsDn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 
512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cnyvDpbBAuzhoGm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq 
%zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cnyvDpbBAuzhoGm .L_small_initial_partial_block_cnyvDpbBAuzhoGm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cnyvDpbBAuzhoGm: orq %r8,%r8 je .L_after_reduction_cnyvDpbBAuzhoGm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cnyvDpbBAuzhoGm: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_7_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_puBiejaewnoDvka vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_puBiejaewnoDvka .L_16_blocks_overflow_puBiejaewnoDvka: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_puBiejaewnoDvka: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq 
$0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kEkkBlBkynveErA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kEkkBlBkynveErA .L_small_initial_partial_block_kEkkBlBkynveErA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kEkkBlBkynveErA: orq %r8,%r8 je .L_after_reduction_kEkkBlBkynveErA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kEkkBlBkynveErA: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_8_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_eaeCeiduedGDdDq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_eaeCeiduedGDdDq .L_16_blocks_overflow_eaeCeiduedGDdDq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_eaeCeiduedGDdDq: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qsuohqatcFrqreB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qsuohqatcFrqreB .L_small_initial_partial_block_qsuohqatcFrqreB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qsuohqatcFrqreB: orq 
%r8,%r8 je .L_after_reduction_qsuohqatcFrqreB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qsuohqatcFrqreB: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_9_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_zgrBucdeiivwwje vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_zgrBucdeiivwwje .L_16_blocks_overflow_zgrBucdeiivwwje: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_zgrBucdeiivwwje: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BobokvzEgBCGCux subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BobokvzEgBCGCux .L_small_initial_partial_block_BobokvzEgBCGCux: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BobokvzEgBCGCux: orq %r8,%r8 je .L_after_reduction_BobokvzEgBCGCux vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BobokvzEgBCGCux: jmp .L_last_blocks_done_FBaFgdErDhzlksr 
.L_last_num_blocks_is_10_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_rqjyEzzCiBijwho vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_rqjyEzzCiBijwho .L_16_blocks_overflow_rqjyEzzCiBijwho: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_rqjyEzzCiBijwho: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gptBtCibyiDhlou subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gptBtCibyiDhlou .L_small_initial_partial_block_gptBtCibyiDhlou: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gptBtCibyiDhlou: orq %r8,%r8 je .L_after_reduction_gptBtCibyiDhlou vpxorq %xmm7,%xmm14,%xmm14 
.L_after_reduction_gptBtCibyiDhlou: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_11_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_udirAnChEpiDCdb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_udirAnChEpiDCdb .L_16_blocks_overflow_udirAnChEpiDCdb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_udirAnChEpiDCdb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EuymoBDpuhDzkkw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EuymoBDpuhDzkkw .L_small_initial_partial_block_EuymoBDpuhDzkkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq 
$0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EuymoBDpuhDzkkw: orq %r8,%r8 je .L_after_reduction_EuymoBDpuhDzkkw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EuymoBDpuhDzkkw: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_12_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_nCrveguADGnpgFu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_nCrveguADGnpgFu .L_16_blocks_overflow_nCrveguADGnpgFu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_nCrveguADGnpgFu: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EbClbforwjDGhdq subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EbClbforwjDGhdq .L_small_initial_partial_block_EbClbforwjDGhdq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EbClbforwjDGhdq: orq %r8,%r8 je .L_after_reduction_EbClbforwjDGhdq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EbClbforwjDGhdq: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_13_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_FuAeDsuGfAcCbnh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_FuAeDsuGfAcCbnh .L_16_blocks_overflow_FuAeDsuGfAcCbnh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_FuAeDsuGfAcCbnh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 
vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GdeeilznaFbDlhh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GdeeilznaFbDlhh .L_small_initial_partial_block_GdeeilznaFbDlhh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 
98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GdeeilznaFbDlhh: orq %r8,%r8 je .L_after_reduction_GdeeilznaFbDlhh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GdeeilznaFbDlhh: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_14_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_FvEhyckDsphilDy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_FvEhyckDsphilDy .L_16_blocks_overflow_FvEhyckDsphilDy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_FvEhyckDsphilDy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 
.byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_fkoDbsekulkxCkw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_fkoDbsekulkxCkw .L_small_initial_partial_block_fkoDbsekulkxCkw: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fkoDbsekulkxCkw: orq %r8,%r8 je .L_after_reduction_fkoDbsekulkxCkw vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_fkoDbsekulkxCkw: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_15_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_lpConoqwylkjlwn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_lpConoqwylkjlwn .L_16_blocks_overflow_lpConoqwylkjlwn: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_lpConoqwylkjlwn: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DbmjnDvmvfAywny subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 
98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DbmjnDvmvfAywny .L_small_initial_partial_block_DbmjnDvmvfAywny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DbmjnDvmvfAywny: orq %r8,%r8 je .L_after_reduction_DbmjnDvmvfAywny vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_DbmjnDvmvfAywny: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_16_FBaFgdErDhzlksr: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_vhaFwxkrByAhtie vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_vhaFwxkrByAhtie .L_16_blocks_overflow_vhaFwxkrByAhtie: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd 
%zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_vhaFwxkrByAhtie: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 
vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_ciyykzjryphtjAc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ciyykzjryphtjAc: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ciyykzjryphtjAc: jmp .L_last_blocks_done_FBaFgdErDhzlksr .L_last_num_blocks_is_0_FBaFgdErDhzlksr: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 
vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_FBaFgdErDhzlksr: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_efvnrtvwAsfehEC .L_message_below_32_blocks_efvnrtvwAsfehEC: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_yBFabBiEpjEBBsr vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_yBFabBiEpjEBBsr: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_cuuhesezwjvjmyk cmpl $8,%r10d je .L_last_num_blocks_is_8_cuuhesezwjvjmyk jb .L_last_num_blocks_is_7_1_cuuhesezwjvjmyk cmpl $12,%r10d je .L_last_num_blocks_is_12_cuuhesezwjvjmyk jb .L_last_num_blocks_is_11_9_cuuhesezwjvjmyk cmpl $15,%r10d je .L_last_num_blocks_is_15_cuuhesezwjvjmyk ja .L_last_num_blocks_is_16_cuuhesezwjvjmyk cmpl $14,%r10d je .L_last_num_blocks_is_14_cuuhesezwjvjmyk jmp .L_last_num_blocks_is_13_cuuhesezwjvjmyk .L_last_num_blocks_is_11_9_cuuhesezwjvjmyk: cmpl $10,%r10d je 
.L_last_num_blocks_is_10_cuuhesezwjvjmyk ja .L_last_num_blocks_is_11_cuuhesezwjvjmyk jmp .L_last_num_blocks_is_9_cuuhesezwjvjmyk .L_last_num_blocks_is_7_1_cuuhesezwjvjmyk: cmpl $4,%r10d je .L_last_num_blocks_is_4_cuuhesezwjvjmyk jb .L_last_num_blocks_is_3_1_cuuhesezwjvjmyk cmpl $6,%r10d ja .L_last_num_blocks_is_7_cuuhesezwjvjmyk je .L_last_num_blocks_is_6_cuuhesezwjvjmyk jmp .L_last_num_blocks_is_5_cuuhesezwjvjmyk .L_last_num_blocks_is_3_1_cuuhesezwjvjmyk: cmpl $2,%r10d ja .L_last_num_blocks_is_3_cuuhesezwjvjmyk je .L_last_num_blocks_is_2_cuuhesezwjvjmyk .L_last_num_blocks_is_1_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_yqjovttCDEvpyyd vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_yqjovttCDEvpyyd .L_16_blocks_overflow_yqjovttCDEvpyyd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_yqjovttCDEvpyyd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_vEAkobbEjFEfDjE subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vEAkobbEjFEfDjE .L_small_initial_partial_block_vEAkobbEjFEfDjE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_vEAkobbEjFEfDjE .L_small_initial_compute_done_vEAkobbEjFEfDjE: .L_after_reduction_vEAkobbEjFEfDjE: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_2_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_dunlemEBzoyBoxa vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_dunlemEBzoyBoxa .L_16_blocks_overflow_dunlemEBzoyBoxa: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_dunlemEBzoyBoxa: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq 
%ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jwqibvpanppwwkg subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jwqibvpanppwwkg .L_small_initial_partial_block_jwqibvpanppwwkg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jwqibvpanppwwkg: orq %r8,%r8 je .L_after_reduction_jwqibvpanppwwkg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_jwqibvpanppwwkg: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_3_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_gknxnDbcehnficG vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_gknxnDbcehnficG .L_16_blocks_overflow_gknxnDbcehnficG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_gknxnDbcehnficG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 
98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yrqnxcGbhfxbzua subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yrqnxcGbhfxbzua .L_small_initial_partial_block_yrqnxcGbhfxbzua: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yrqnxcGbhfxbzua: orq %r8,%r8 je .L_after_reduction_yrqnxcGbhfxbzua vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yrqnxcGbhfxbzua: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_4_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_vkChDblsuoFkgEp vpaddd %zmm28,%zmm2,%zmm0 jmp 
.L_16_blocks_ok_vkChDblsuoFkgEp .L_16_blocks_overflow_vkChDblsuoFkgEp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_vkChDblsuoFkgEp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kqcfotnkDdwFCle subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kqcfotnkDdwFCle .L_small_initial_partial_block_kqcfotnkDdwFCle: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq 
%zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kqcfotnkDdwFCle: orq %r8,%r8 je .L_after_reduction_kqcfotnkDdwFCle vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kqcfotnkDdwFCle: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_5_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_aGCpdetktlAtivE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_aGCpdetktlAtivE .L_16_blocks_overflow_aGCpdetktlAtivE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_aGCpdetktlAtivE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 
98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BhyxbheFwtzAGqD subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BhyxbheFwtzAGqD .L_small_initial_partial_block_BhyxbheFwtzAGqD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BhyxbheFwtzAGqD: orq %r8,%r8 je .L_after_reduction_BhyxbheFwtzAGqD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BhyxbheFwtzAGqD: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_6_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_DlEhcmhmAqggthl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_DlEhcmhmAqggthl .L_16_blocks_overflow_DlEhcmhmAqggthl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_DlEhcmhmAqggthl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 
.byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ykkpmhjniEvyltu subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ykkpmhjniEvyltu .L_small_initial_partial_block_ykkpmhjniEvyltu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ykkpmhjniEvyltu: orq %r8,%r8 je .L_after_reduction_ykkpmhjniEvyltu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ykkpmhjniEvyltu: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_7_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_szxcAmcFcFxFikD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_szxcAmcFcFxFikD .L_16_blocks_overflow_szxcAmcFcFxFikD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_szxcAmcFcFxFikD: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 
vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BsvCgmoprgDppla subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BsvCgmoprgDppla .L_small_initial_partial_block_BsvCgmoprgDppla: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BsvCgmoprgDppla: orq %r8,%r8 je .L_after_reduction_BsvCgmoprgDppla vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BsvCgmoprgDppla: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_8_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_nGgmonbofwfdiqp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_nGgmonbofwfdiqp .L_16_blocks_overflow_nGgmonbofwfdiqp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_nGgmonbofwfdiqp: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qxxbtfdlDzEAenB subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qxxbtfdlDzEAenB .L_small_initial_partial_block_qxxbtfdlDzEAenB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qxxbtfdlDzEAenB: orq %r8,%r8 je .L_after_reduction_qxxbtfdlDzEAenB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qxxbtfdlDzEAenB: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_9_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_isErwnbzGxuwnib vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_isErwnbzGxuwnib .L_16_blocks_overflow_isErwnbzGxuwnib: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_isErwnbzGxuwnib: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ldosriajsdgdtty subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ldosriajsdgdtty .L_small_initial_partial_block_ldosriajsdgdtty: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 
98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ldosriajsdgdtty: orq %r8,%r8 je .L_after_reduction_ldosriajsdgdtty vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ldosriajsdgdtty: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_10_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_ylkmjtxhbazdAht vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_ylkmjtxhbazdAht .L_16_blocks_overflow_ylkmjtxhbazdAht: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_ylkmjtxhbazdAht: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cogjdCgsFwwACAv subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cogjdCgsFwwACAv .L_small_initial_partial_block_cogjdCgsFwwACAv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 
vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cogjdCgsFwwACAv: orq %r8,%r8 je .L_after_reduction_cogjdCgsFwwACAv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cogjdCgsFwwACAv: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_11_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_amFqbyqnsgkbEyu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_amFqbyqnsgkbEyu .L_16_blocks_overflow_amFqbyqnsgkbEyu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_amFqbyqnsgkbEyu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_duCGbqEavktkktr subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_duCGbqEavktkktr .L_small_initial_partial_block_duCGbqEavktkktr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_duCGbqEavktkktr: orq %r8,%r8 je .L_after_reduction_duCGbqEavktkktr vpxorq 
%xmm7,%xmm14,%xmm14 .L_after_reduction_duCGbqEavktkktr: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_12_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_Gxdljjoscahpipo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_Gxdljjoscahpipo .L_16_blocks_overflow_Gxdljjoscahpipo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_Gxdljjoscahpipo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} 
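/* Editorial annotation (assumption, not generated output): the masked stores above write
   the final blocks of CTR output; mask %k1, loaded from byte64_len_to_mask_table, covers
   only the valid bytes of the trailing 64-byte chunk. The vmovdqu8 ..{%k1}{z} and vpshufb
   steps that follow zero the unused tail of the loaded ciphertext and byte-swap it so it
   can be folded into the GHASH accumulator in %xmm14. */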
vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EbrtvvbpfhnmgEG subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EbrtvvbpfhnmgEG .L_small_initial_partial_block_EbrtvvbpfhnmgEG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EbrtvvbpfhnmgEG: orq %r8,%r8 je .L_after_reduction_EbrtvvbpfhnmgEG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EbrtvvbpfhnmgEG: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_13_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_BzbwlusABaejjjy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_BzbwlusABaejjjy .L_16_blocks_overflow_BzbwlusABaejjjy: vpshufb 
%zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_BzbwlusABaejjjy: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 
%xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_axssylktqnfAEEo subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_axssylktqnfAEEo .L_small_initial_partial_block_axssylktqnfAEEo: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_axssylktqnfAEEo: orq %r8,%r8 je .L_after_reduction_axssylktqnfAEEo vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_axssylktqnfAEEo: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_14_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae 
.L_16_blocks_overflow_wfxluBeiqgADmFb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_wfxluBeiqgADmFb .L_16_blocks_overflow_wfxluBeiqgADmFb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_wfxluBeiqgADmFb: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_goygycijAEpsvvt subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_goygycijAEpsvvt .L_small_initial_partial_block_goygycijAEpsvvt: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_goygycijAEpsvvt: orq %r8,%r8 je .L_after_reduction_goygycijAEpsvvt vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_goygycijAEpsvvt: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_15_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_vyklFkDwzsnvgsC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_vyklFkDwzsnvgsC .L_16_blocks_overflow_vyklFkDwzsnvgsC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_vyklFkDwzsnvgsC: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 
vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wtfwhoaquntnsFC subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wtfwhoaquntnsFC .L_small_initial_partial_block_wtfwhoaquntnsFC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
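/* Editorial annotation (best-effort reading): the vextracti64x4/vextracti32x4 + vpxorq
   chain that follows folds the four 128-bit lanes of the carry-less products down to a
   single 128-bit value, which is then reduced modulo the GHASH polynomial using the POLY2
   constant and accumulated into %xmm14. */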
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wtfwhoaquntnsFC: orq %r8,%r8 je .L_after_reduction_wtfwhoaquntnsFC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wtfwhoaquntnsFC: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_16_cuuhesezwjvjmyk: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_cwmmduuojwChbzc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_cwmmduuojwChbzc .L_16_blocks_overflow_cwmmduuojwChbzc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_cwmmduuojwChbzc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq 
%zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_EFFoGallwwbomEy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EFFoGallwwbomEy: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EFFoGallwwbomEy: jmp .L_last_blocks_done_cuuhesezwjvjmyk .L_last_num_blocks_is_0_cuuhesezwjvjmyk: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 
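/* Editorial annotation (assumption): the .L_last_num_blocks_is_0_* path performs no AES
   work. It only completes the GHASH over the 16 previously processed blocks cached at
   768..960(%rsp), with the running accumulator %zmm14 folded into the first group,
   multiplying them by the hash-key powers cached at 0..192(%rsp,%rbx,1) and reducing the
   result back into %xmm14. */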
vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_cuuhesezwjvjmyk: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_efvnrtvwAsfehEC .L_message_below_equal_16_blocks_efvnrtvwAsfehEC: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_sFoDGktxtpnDmhn jl .L_small_initial_num_blocks_is_7_1_sFoDGktxtpnDmhn cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_sFoDGktxtpnDmhn jl .L_small_initial_num_blocks_is_11_9_sFoDGktxtpnDmhn cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_sFoDGktxtpnDmhn cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_sFoDGktxtpnDmhn cmpq $14,%r12 je .L_small_initial_num_blocks_is_14_sFoDGktxtpnDmhn jmp .L_small_initial_num_blocks_is_13_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_11_9_sFoDGktxtpnDmhn: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_sFoDGktxtpnDmhn cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_sFoDGktxtpnDmhn jmp .L_small_initial_num_blocks_is_9_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_7_1_sFoDGktxtpnDmhn: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_sFoDGktxtpnDmhn jl .L_small_initial_num_blocks_is_3_1_sFoDGktxtpnDmhn cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_sFoDGktxtpnDmhn cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_sFoDGktxtpnDmhn jmp .L_small_initial_num_blocks_is_5_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_3_1_sFoDGktxtpnDmhn: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_sFoDGktxtpnDmhn cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_1_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 
98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_FGCgmvsGdutropz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FGCgmvsGdutropz .L_small_initial_partial_block_FGCgmvsGdutropz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_FGCgmvsGdutropz .L_small_initial_compute_done_FGCgmvsGdutropz: .L_after_reduction_FGCgmvsGdutropz: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_2_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 $0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jEBbtDDBfBjEltG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jEBbtDDBfBjEltG .L_small_initial_partial_block_jEBbtDDBfBjEltG: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jEBbtDDBfBjEltG: orq %r8,%r8 je .L_after_reduction_jEBbtDDBfBjEltG vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_jEBbtDDBfBjEltG: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_3_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EjibsatBlzkgqAl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EjibsatBlzkgqAl 
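/* Editorial annotation (assumption): each .L_small_initial_num_blocks_is_N_* handler ends
   with two GHASH tails. The path above is taken when the last block is complete (it stores
   0 at (%rdx)); the .L_small_initial_partial_block_* path below records the remaining byte
   count at (%rdx) and saves the last block at 16(%rsi) so its GHASH contribution can be
   completed once the partial block is filled in later. */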
.L_small_initial_partial_block_EjibsatBlzkgqAl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EjibsatBlzkgqAl: orq %r8,%r8 je .L_after_reduction_EjibsatBlzkgqAl vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_EjibsatBlzkgqAl: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_4_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xqconsagugmDarn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xqconsagugmDarn .L_small_initial_partial_block_xqconsagugmDarn: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 
98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xqconsagugmDarn: orq %r8,%r8 je .L_after_reduction_xqconsagugmDarn vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_xqconsagugmDarn: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_5_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pbFCejpvpmxjAhk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pbFCejpvpmxjAhk .L_small_initial_partial_block_pbFCejpvpmxjAhk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pbFCejpvpmxjAhk: orq %r8,%r8 je .L_after_reduction_pbFCejpvpmxjAhk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_pbFCejpvpmxjAhk: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_6_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_uktkzFjovqcxfqp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 
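/* Editorial annotation: the .byte sequences beginning with 98 (0x62, the EVEX prefix) are
   hand-encoded instructions from the VAES and VPCLMULQDQ extensions (vaesenc/vaesenclast
   and vpclmulqdq on ymm/zmm registers), presumably emitted as raw bytes so the file still
   assembles with toolchains that predate those extensions. */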
vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_uktkzFjovqcxfqp .L_small_initial_partial_block_uktkzFjovqcxfqp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_uktkzFjovqcxfqp: orq %r8,%r8 je .L_after_reduction_uktkzFjovqcxfqp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_uktkzFjovqcxfqp: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_7_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_giliDfyAgzgDsqz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_giliDfyAgzgDsqz .L_small_initial_partial_block_giliDfyAgzgDsqz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_giliDfyAgzgDsqz: orq %r8,%r8 je .L_after_reduction_giliDfyAgzgDsqz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_giliDfyAgzgDsqz: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_8_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 
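/* The ".byte 98,..." sequences in this file are hand-encoded EVEX instructions
   (0x62 prefix): opcode 0xDC/0xDD (decimal 220/221) is vaesenc/vaesenclast and
   0x44 (decimal 68) is vpclmulqdq. They are emitted as raw bytes presumably so
   the file still assembles with toolchains that predate the VAES/VPCLMULQDQ
   mnemonics. */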
vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_DjnECqEweilEAGu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_DjnECqEweilEAGu .L_small_initial_partial_block_DjnECqEweilEAGu: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 
vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_DjnECqEweilEAGu: orq %r8,%r8 je .L_after_reduction_DjnECqEweilEAGu vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_DjnECqEweilEAGu: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_9_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kgxaGkfnalAmrwz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq 
$8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kgxaGkfnalAmrwz .L_small_initial_partial_block_kgxaGkfnalAmrwz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kgxaGkfnalAmrwz: orq %r8,%r8 je .L_after_reduction_kgxaGkfnalAmrwz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_kgxaGkfnalAmrwz: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_10_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BCvcswkitbgmjFe subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BCvcswkitbgmjFe .L_small_initial_partial_block_BCvcswkitbgmjFe: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 
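/* Tail handling: when %r8 is still nonzero, the byte-reflected last data block
   (%xmm13) is XORed into the GHASH accumulator %xmm14; with no remaining bytes
   that step is skipped. */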
.L_small_initial_compute_done_BCvcswkitbgmjFe: orq %r8,%r8 je .L_after_reduction_BCvcswkitbgmjFe vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_BCvcswkitbgmjFe: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_11_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xxiyEjrxujqtjjz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq 
%zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xxiyEjrxujqtjjz .L_small_initial_partial_block_xxiyEjrxujqtjjz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xxiyEjrxujqtjjz: orq %r8,%r8 je .L_after_reduction_xxiyEjrxujqtjjz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_xxiyEjrxujqtjjz: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_12_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 
vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_jAwfsnuhpsyacia subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_jAwfsnuhpsyacia .L_small_initial_partial_block_jAwfsnuhpsyacia: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
.byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_jAwfsnuhpsyacia: orq %r8,%r8 je .L_after_reduction_jAwfsnuhpsyacia vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_jAwfsnuhpsyacia: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_13_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iimAEdydkqcfzCi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 
98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iimAEdydkqcfzCi .L_small_initial_partial_block_iimAEdydkqcfzCi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iimAEdydkqcfzCi: orq %r8,%r8 je .L_after_reduction_iimAEdydkqcfzCi vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_iimAEdydkqcfzCi: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_14_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq 
%zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cflpryEedqzCjvl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 
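/* Final GHASH reduction: the encoded vpclmulqdq ops below multiply by the POLY2
   constant just loaded into %xmm20 to fold the 256-bit carry-less product
   (split across %xmm0/%xmm3) down to 128 bits; vpternlogq with immediate 0x96
   acts as a three-way XOR, leaving the reduced hash value in %xmm14. */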
.byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cflpryEedqzCjvl .L_small_initial_partial_block_cflpryEedqzCjvl: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cflpryEedqzCjvl: orq %r8,%r8 je .L_after_reduction_cflpryEedqzCjvl vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_cflpryEedqzCjvl: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_15_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 
98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pFbDADbCplEDFfw subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pFbDADbCplEDFfw .L_small_initial_partial_block_pFbDADbCplEDFfw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 
vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pFbDADbCplEDFfw: orq %r8,%r8 je .L_after_reduction_pFbDADbCplEDFfw vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_pFbDADbCplEDFfw: jmp .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn .L_small_initial_num_blocks_is_16_sFoDGktxtpnDmhn: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 
vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_pGqEmoznEqGhujq: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pGqEmoznEqGhujq: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_pGqEmoznEqGhujq: .L_small_initial_blocks_encrypted_sFoDGktxtpnDmhn: .L_ghash_done_efvnrtvwAsfehEC: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_efvnrtvwAsfehEC: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_efvnrtvwAsfehEC: jmp .Lexit_gcm_decrypt .align 32 .Laes_gcm_decrypt_256_avx512: orq %r8,%r8 je .L_enc_dec_abort_keEetjmxflGqBfv xorq %r14,%r14 vmovdqu64 64(%rsi),%xmm14 vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 movl (%rdx),%eax orq %rax,%rax je .L_partial_block_done_fhsskwCeFatEtrh movl $16,%r10d leaq byte_len_to_mask_table(%rip),%r12 cmpq %r10,%r8 cmovcq %r8,%r10 kmovw (%r12,%r10,2),%k1 vmovdqu8 (%rcx),%xmm0{%k1}{z} vmovdqu64 16(%rsi),%xmm3 leaq 80(%rsi),%r10 vmovdqu64 240(%r10),%xmm4 leaq SHIFT_MASK(%rip),%r12 addq %rax,%r12 vmovdqu64 (%r12),%xmm5 vpshufb %xmm5,%xmm3,%xmm3 vmovdqa64 %xmm0,%xmm6 vpxorq %xmm0,%xmm3,%xmm3 leaq (%r8,%rax,1),%r13 subq $16,%r13 jge .L_no_extra_mask_fhsskwCeFatEtrh subq %r13,%r12 .L_no_extra_mask_fhsskwCeFatEtrh: vmovdqu64 16(%r12),%xmm0 vpand %xmm0,%xmm3,%xmm3 vpand %xmm0,%xmm6,%xmm6 vpshufb SHUF_MASK(%rip),%xmm6,%xmm6 vpshufb %xmm5,%xmm6,%xmm6 vpxorq %xmm6,%xmm14,%xmm14 cmpq $0,%r13 jl .L_partial_incomplete_fhsskwCeFatEtrh .byte 98,243,13,8,68,252,17 .byte 98,115,13,8,68,212,0 .byte 
98,115,13,8,68,220,1 .byte 98,115,13,8,68,244,16 vpxorq %xmm11,%xmm14,%xmm14 vpsrldq $8,%xmm14,%xmm11 vpslldq $8,%xmm14,%xmm14 vpxorq %xmm11,%xmm7,%xmm7 vpxorq %xmm10,%xmm14,%xmm14 vmovdqu64 POLY2(%rip),%xmm11 .byte 98,83,37,8,68,214,1 vpslldq $8,%xmm10,%xmm10 vpxorq %xmm10,%xmm14,%xmm14 .byte 98,83,37,8,68,214,0 vpsrldq $4,%xmm10,%xmm10 .byte 98,83,37,8,68,246,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm10,%xmm7,%xmm14 movl $0,(%rdx) movq %rax,%r12 movq $16,%rax subq %r12,%rax jmp .L_enc_dec_done_fhsskwCeFatEtrh .L_partial_incomplete_fhsskwCeFatEtrh: addl %r8d,(%rdx) movq %r8,%rax .L_enc_dec_done_fhsskwCeFatEtrh: leaq byte_len_to_mask_table(%rip),%r12 kmovw (%r12,%rax,2),%k1 movq %r9,%r12 vmovdqu8 %xmm3,(%r12){%k1} .L_partial_block_done_fhsskwCeFatEtrh: vmovdqu64 0(%rsi),%xmm2 subq %rax,%r8 je .L_enc_dec_done_keEetjmxflGqBfv cmpq $256,%r8 jbe .L_message_below_equal_16_blocks_keEetjmxflGqBfv vmovdqa64 SHUF_MASK(%rip),%zmm29 vmovdqa64 ddq_addbe_4444(%rip),%zmm27 vmovdqa64 ddq_addbe_1234(%rip),%zmm28 vmovd %xmm2,%r15d andl $255,%r15d vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpshufb %zmm29,%zmm2,%zmm2 cmpb $240,%r15b jae .L_next_16_overflow_tpefFeFucnbumCh vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_tpefFeFucnbumCh .L_next_16_overflow_tpefFeFucnbumCh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_tpefFeFucnbumCh: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 0(%rcx,%rax,1),%zmm0 vmovdqu8 64(%rcx,%rax,1),%zmm3 vmovdqu8 128(%rcx,%rax,1),%zmm4 vmovdqu8 192(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 
.byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,0(%r10,%rax,1) vmovdqu8 %zmm10,64(%r10,%rax,1) vmovdqu8 %zmm11,128(%r10,%rax,1) vmovdqu8 %zmm12,192(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,768(%rsp) vmovdqa64 %zmm10,832(%rsp) vmovdqa64 %zmm11,896(%rsp) vmovdqa64 %zmm12,960(%rsp) leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_iDAhziwpjqoADaj vmovdqu64 192(%r12),%zmm0 vmovdqu64 %zmm0,704(%rsp) vmovdqu64 128(%r12),%zmm3 vmovdqu64 %zmm3,640(%rsp) vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 64(%r12),%zmm4 vmovdqu64 %zmm4,576(%rsp) vmovdqu64 0(%r12),%zmm5 vmovdqu64 %zmm5,512(%rsp) .L_skip_hkeys_precomputation_iDAhziwpjqoADaj: cmpq $512,%r8 jb .L_message_below_32_blocks_keEetjmxflGqBfv cmpb $240,%r15b jae .L_next_16_overflow_qgCCeDpdgxsjtxo vpaddd %zmm28,%zmm2,%zmm7 vpaddd %zmm27,%zmm7,%zmm10 vpaddd %zmm27,%zmm10,%zmm11 vpaddd %zmm27,%zmm11,%zmm12 jmp .L_next_16_ok_qgCCeDpdgxsjtxo .L_next_16_overflow_qgCCeDpdgxsjtxo: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm12 vpaddd ddq_add_1234(%rip),%zmm2,%zmm7 vpaddd %zmm12,%zmm7,%zmm10 vpaddd %zmm12,%zmm10,%zmm11 vpaddd %zmm12,%zmm11,%zmm12 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vpshufb %zmm29,%zmm12,%zmm12 .L_next_16_ok_qgCCeDpdgxsjtxo: vshufi64x2 $255,%zmm12,%zmm12,%zmm2 addb $16,%r15b vmovdqu8 256(%rcx,%rax,1),%zmm0 vmovdqu8 320(%rcx,%rax,1),%zmm3 vmovdqu8 384(%rcx,%rax,1),%zmm4 vmovdqu8 448(%rcx,%rax,1),%zmm5 vbroadcastf64x2 0(%rdi),%zmm6 vpxorq %zmm6,%zmm7,%zmm7 vpxorq %zmm6,%zmm10,%zmm10 vpxorq %zmm6,%zmm11,%zmm11 vpxorq %zmm6,%zmm12,%zmm12 vbroadcastf64x2 16(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 32(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 48(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 64(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 80(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 96(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 112(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 128(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 144(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 160(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 
98,114,29,72,220,230 vbroadcastf64x2 176(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 192(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 208(%rdi),%zmm6 .byte 98,242,69,72,220,254 .byte 98,114,45,72,220,214 .byte 98,114,37,72,220,222 .byte 98,114,29,72,220,230 vbroadcastf64x2 224(%rdi),%zmm6 .byte 98,242,69,72,221,254 .byte 98,114,45,72,221,214 .byte 98,114,37,72,221,222 .byte 98,114,29,72,221,230 vpxorq %zmm0,%zmm7,%zmm7 vpxorq %zmm3,%zmm10,%zmm10 vpxorq %zmm4,%zmm11,%zmm11 vpxorq %zmm5,%zmm12,%zmm12 movq %r9,%r10 vmovdqu8 %zmm7,256(%r10,%rax,1) vmovdqu8 %zmm10,320(%r10,%rax,1) vmovdqu8 %zmm11,384(%r10,%rax,1) vmovdqu8 %zmm12,448(%r10,%rax,1) vpshufb %zmm29,%zmm0,%zmm7 vpshufb %zmm29,%zmm3,%zmm10 vpshufb %zmm29,%zmm4,%zmm11 vpshufb %zmm29,%zmm5,%zmm12 vmovdqa64 %zmm7,1024(%rsp) vmovdqa64 %zmm10,1088(%rsp) vmovdqa64 %zmm11,1152(%rsp) vmovdqa64 %zmm12,1216(%rsp) testq %r14,%r14 jnz .L_skip_hkeys_precomputation_ErxbfranEhsBGhe vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 
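/* Hash-key power precomputation: each repeated block in this region
   multiplies cached key powers by a broadcast power of the GHASH key
   using four vpclmulqdq (imm 0x11/0x00/0x01/0x10 over the 64-bit halves),
   folds the cross terms with vpslldq/vpsrldq, reduces the result with the
   POLY2 constant and a three-way XOR (vpternlogq $0x96), and stores it to
   a fixed stack slot for the 16-block-wide GHASH below. */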
vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,192(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,128(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,64(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,0(%rsp) .L_skip_hkeys_precomputation_ErxbfranEhsBGhe: movq $1,%r14 addq $512,%rax subq $512,%r8 cmpq $768,%r8 jb .L_no_more_big_nblocks_keEetjmxflGqBfv .L_encrypt_big_nblocks_keEetjmxflGqBfv: cmpb $240,%r15b jae .L_16_blocks_overflow_budzEysnblsjtjq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_budzEysnblsjtjq .L_16_blocks_overflow_budzEysnblsjtjq: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_budzEysnblsjtjq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 
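/* Main loop (.L_encrypt_big_nblocks_*): 768 bytes, i.e. 48 AES blocks in
   three groups of 16, per iteration. The AES-CTR rounds for each group
   are interleaved with vpclmulqdq GHASH multiplies over byte-reflected
   blocks cached on the stack from the preceding groups. */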
.byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_ojhGelucjaDDiwh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_ojhGelucjaDDiwh .L_16_blocks_overflow_ojhGelucjaDDiwh: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_ojhGelucjaDDiwh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 
98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_jpAfkEctagbyfkB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp 
.L_16_blocks_ok_jpAfkEctagbyfkB .L_16_blocks_overflow_jpAfkEctagbyfkB: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_jpAfkEctagbyfkB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 512(%rcx,%rax,1),%zmm17 vmovdqu8 576(%rcx,%rax,1),%zmm19 vmovdqu8 640(%rcx,%rax,1),%zmm20 vmovdqu8 704(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpternlogq $0x96,%zmm15,%zmm12,%zmm6 vpxorq %zmm24,%zmm6,%zmm6 vpternlogq $0x96,%zmm10,%zmm13,%zmm7 vpxorq %zmm25,%zmm7,%zmm7 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vextracti64x4 $1,%zmm6,%ymm12 vpxorq %ymm12,%ymm6,%ymm6 vextracti32x4 $1,%ymm6,%xmm12 vpxorq %xmm12,%xmm6,%xmm6 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm6 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,512(%r10,%rax,1) vmovdqu8 %zmm3,576(%r10,%rax,1) vmovdqu8 %zmm4,640(%r10,%rax,1) vmovdqu8 %zmm5,704(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1024(%rsp) vmovdqa64 %zmm3,1088(%rsp) vmovdqa64 %zmm4,1152(%rsp) vmovdqa64 %zmm5,1216(%rsp) vmovdqa64 %zmm6,%zmm14 addq $768,%rax subq $768,%r8 cmpq $768,%r8 jae .L_encrypt_big_nblocks_keEetjmxflGqBfv .L_no_more_big_nblocks_keEetjmxflGqBfv: cmpq $512,%r8 jae .L_encrypt_32_blocks_keEetjmxflGqBfv cmpq $256,%r8 jae .L_encrypt_16_blocks_keEetjmxflGqBfv .L_encrypt_0_blocks_ghash_32_keEetjmxflGqBfv: movl %r8d,%r10d andl $~15,%r10d movl $256,%ebx subl %r10d,%ebx vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 addl $256,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CjljsCjaoxujvDg cmpl $8,%r10d je .L_last_num_blocks_is_8_CjljsCjaoxujvDg jb .L_last_num_blocks_is_7_1_CjljsCjaoxujvDg cmpl $12,%r10d je .L_last_num_blocks_is_12_CjljsCjaoxujvDg jb .L_last_num_blocks_is_11_9_CjljsCjaoxujvDg cmpl $15,%r10d je .L_last_num_blocks_is_15_CjljsCjaoxujvDg ja .L_last_num_blocks_is_16_CjljsCjaoxujvDg cmpl $14,%r10d je .L_last_num_blocks_is_14_CjljsCjaoxujvDg jmp .L_last_num_blocks_is_13_CjljsCjaoxujvDg .L_last_num_blocks_is_11_9_CjljsCjaoxujvDg: cmpl $10,%r10d je .L_last_num_blocks_is_10_CjljsCjaoxujvDg ja .L_last_num_blocks_is_11_CjljsCjaoxujvDg jmp .L_last_num_blocks_is_9_CjljsCjaoxujvDg .L_last_num_blocks_is_7_1_CjljsCjaoxujvDg: cmpl $4,%r10d je .L_last_num_blocks_is_4_CjljsCjaoxujvDg jb .L_last_num_blocks_is_3_1_CjljsCjaoxujvDg cmpl $6,%r10d ja .L_last_num_blocks_is_7_CjljsCjaoxujvDg je .L_last_num_blocks_is_6_CjljsCjaoxujvDg jmp .L_last_num_blocks_is_5_CjljsCjaoxujvDg 
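/* Tail dispatch: the remaining length is rounded up to whole blocks
   (addl $15 / shrl $4) and a compare/branch tree selects one of the
   specialised .L_last_num_blocks_is_0..16 handlers below. */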
.L_last_num_blocks_is_3_1_CjljsCjaoxujvDg: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CjljsCjaoxujvDg je .L_last_num_blocks_is_2_CjljsCjaoxujvDg .L_last_num_blocks_is_1_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_BpzosFahboxovuF vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_BpzosFahboxovuF .L_16_blocks_overflow_BpzosFahboxovuF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_BpzosFahboxovuF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_dzmevElEtmlqdvB subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dzmevElEtmlqdvB .L_small_initial_partial_block_dzmevElEtmlqdvB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_dzmevElEtmlqdvB .L_small_initial_compute_done_dzmevElEtmlqdvB: .L_after_reduction_dzmevElEtmlqdvB: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_2_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_idijgbEnolbjmvb vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_idijgbEnolbjmvb .L_16_blocks_overflow_idijgbEnolbjmvb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_idijgbEnolbjmvb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} 
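/* Partial-block handling in these tails: a byte mask for the final short
   block is loaded from byte64_len_to_mask_table into %k1, so input is
   read and output written with masked vmovdqu8 {%k1}; the masked input is
   then zero-extended ({%k1}{z}) and byte-reflected for the GHASH update. */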
vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zvohpFFyvnbybFD subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zvohpFFyvnbybFD .L_small_initial_partial_block_zvohpFFyvnbybFD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zvohpFFyvnbybFD: orq %r8,%r8 je .L_after_reduction_zvohpFFyvnbybFD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zvohpFFyvnbybFD: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_3_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_wghnihbAoEsnemr vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_wghnihbAoEsnemr .L_16_blocks_overflow_wghnihbAoEsnemr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_wghnihbAoEsnemr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 
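/* vpternlogq with immediate 0x96 computes a three-way XOR (A ^ B ^ C);
   it is used throughout to fold vpclmulqdq partial products into the
   GHASH accumulators without separate vpxorq steps. */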
vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pjzlkCCsFsjiBsp subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pjzlkCCsFsjiBsp .L_small_initial_partial_block_pjzlkCCsFsjiBsp: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pjzlkCCsFsjiBsp: orq %r8,%r8 je .L_after_reduction_pjzlkCCsFsjiBsp vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pjzlkCCsFsjiBsp: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_4_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_CjzGmeouGBagvfs vpaddd 
%zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_CjzGmeouGBagvfs .L_16_blocks_overflow_CjzGmeouGBagvfs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_CjzGmeouGBagvfs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_emFtlfDdrDiyoGj subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_emFtlfDdrDiyoGj .L_small_initial_partial_block_emFtlfDdrDiyoGj: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 
$2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_emFtlfDdrDiyoGj: orq %r8,%r8 je .L_after_reduction_emFtlfDdrDiyoGj vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_emFtlfDdrDiyoGj: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_5_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_DgBblneEbhavoAc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_DgBblneEbhavoAc .L_16_blocks_overflow_DgBblneEbhavoAc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_DgBblneEbhavoAc: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 
98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vowzfgidatEfBqr subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vowzfgidatEfBqr .L_small_initial_partial_block_vowzfgidatEfBqr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vowzfgidatEfBqr: orq %r8,%r8 je .L_after_reduction_vowzfgidatEfBqr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vowzfgidatEfBqr: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_6_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_sswuqofDefGijpp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_sswuqofDefGijpp .L_16_blocks_overflow_sswuqofDefGijpp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_sswuqofDefGijpp: 
vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zaEschzpbmFozoB subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 
vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zaEschzpbmFozoB .L_small_initial_partial_block_zaEschzpbmFozoB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zaEschzpbmFozoB: orq %r8,%r8 je .L_after_reduction_zaEschzpbmFozoB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zaEschzpbmFozoB: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_7_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_lncoDbxzFvwogbC vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_lncoDbxzFvwogbC .L_16_blocks_overflow_lncoDbxzFvwogbC: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_lncoDbxzFvwogbC: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 
64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mbqrkjfyrCjFtkC subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mbqrkjfyrCjFtkC .L_small_initial_partial_block_mbqrkjfyrCjFtkC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 
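/* Final GHASH reduction: the 512-bit accumulators are folded to 128 bits
   with vextracti64x4/vextracti32x4 plus XOR, then reduced modulo the
   GHASH polynomial via two carry-less multiplies by the POLY2 constant
   and a closing vpternlogq $0x96 into %xmm14, the running hash value. */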
vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mbqrkjfyrCjFtkC: orq %r8,%r8 je .L_after_reduction_mbqrkjfyrCjFtkC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_mbqrkjfyrCjFtkC: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_8_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_ExCdtGvwDseyezE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_ExCdtGvwDseyezE .L_16_blocks_overflow_ExCdtGvwDseyezE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_ExCdtGvwDseyezE: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 
%zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ajbflDwBgvpaEcE subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ajbflDwBgvpaEcE .L_small_initial_partial_block_ajbflDwBgvpaEcE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ajbflDwBgvpaEcE: orq %r8,%r8 je .L_after_reduction_ajbflDwBgvpaEcE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ajbflDwBgvpaEcE: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_9_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_qnvdfsmvntyhGuo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_qnvdfsmvntyhGuo .L_16_blocks_overflow_qnvdfsmvntyhGuo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_qnvdfsmvntyhGuo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 
1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_kduEkAyqanCoGvE subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 
240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_kduEkAyqanCoGvE .L_small_initial_partial_block_kduEkAyqanCoGvE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_kduEkAyqanCoGvE: orq %r8,%r8 je .L_after_reduction_kduEkAyqanCoGvE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_kduEkAyqanCoGvE: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_10_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_ucjmDCDgtvwsblB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_ucjmDCDgtvwsblB .L_16_blocks_overflow_ucjmDCDgtvwsblB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_ucjmDCDgtvwsblB: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 
98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_grEzijkmcwkEkrv subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq 
%ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_grEzijkmcwkEkrv .L_small_initial_partial_block_grEzijkmcwkEkrv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_grEzijkmcwkEkrv: orq %r8,%r8 je .L_after_reduction_grEzijkmcwkEkrv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_grEzijkmcwkEkrv: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_11_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_tGfszqdtairfiAy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_tGfszqdtairfiAy .L_16_blocks_overflow_tGfszqdtairfiAy: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_tGfszqdtairfiAy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 
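// Tail path for 11 remaining blocks: each vbroadcastf64x2 N(%rdi) loads the next
// AES round key, the .byte 98,...,220,... sequences are EVEX-encoded vaesenc rounds
// on the counter blocks, and the interleaved .byte 98,...,68,... sequences are
// vpclmulqdq GHASH updates over earlier blocks saved on the stack.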
vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_nyiEcniDhxadvrv subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_nyiEcniDhxadvrv .L_small_initial_partial_block_nyiEcniDhxadvrv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_nyiEcniDhxadvrv: orq %r8,%r8 je .L_after_reduction_nyiEcniDhxadvrv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_nyiEcniDhxadvrv: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_12_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_GBxxxGGdrBGGAzv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_GBxxxGGdrBGGAzv .L_16_blocks_overflow_GBxxxGGdrBGGAzv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_GBxxxGGdrBGGAzv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 
98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vhoepgywGpbErsu subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_vhoepgywGpbErsu .L_small_initial_partial_block_vhoepgywGpbErsu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vhoepgywGpbErsu: orq %r8,%r8 je .L_after_reduction_vhoepgywGpbErsu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vhoepgywGpbErsu: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_13_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_rFdlFzmcbwfmCFo vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_rFdlFzmcbwfmCFo .L_16_blocks_overflow_rFdlFzmcbwfmCFo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_rFdlFzmcbwfmCFo: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tidvwDCqozzjufl subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 
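// Combine the per-lane GHASH partial products: split the middle term across the
// high and low halves, collapse the four 128-bit lanes with vextracti64x4 /
// vextracti32x4 plus vpxorq, then reduce modulo the field polynomial (POLY2).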
vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tidvwDCqozzjufl .L_small_initial_partial_block_tidvwDCqozzjufl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tidvwDCqozzjufl: orq %r8,%r8 je .L_after_reduction_tidvwDCqozzjufl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tidvwDCqozzjufl: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_14_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_yDllfugovhaluis vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_yDllfugovhaluis .L_16_blocks_overflow_yDllfugovhaluis: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_yDllfugovhaluis: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 
98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vGAylAjswesdfcA subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 
98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vGAylAjswesdfcA .L_small_initial_partial_block_vGAylAjswesdfcA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vGAylAjswesdfcA: orq %r8,%r8 je .L_after_reduction_vGAylAjswesdfcA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_vGAylAjswesdfcA: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_15_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_pincAkEEiiwgxGh vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_pincAkEEiiwgxGh .L_16_blocks_overflow_pincAkEEiiwgxGh: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 
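// Tail path for 15 remaining blocks: the counter blocks prepared above are run
// through the AES rounds below (round keys broadcast from 0(%rdi) through
// 224(%rdi), vaesenc/vaesenclast emitted as .byte) and XORed, CTR-style, with the
// data blocks loaded under the %k1 byte mask.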
.L_16_blocks_ok_pincAkEEiiwgxGh: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 
%zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FgsluzdCoDzfqdG subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FgsluzdCoDzfqdG .L_small_initial_partial_block_FgsluzdCoDzfqdG: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FgsluzdCoDzfqdG: orq %r8,%r8 je 
.L_after_reduction_FgsluzdCoDzfqdG vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FgsluzdCoDzfqdG: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_16_CjljsCjaoxujvDg: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_dBDAoEoFjhwcanb vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_dBDAoEoFjhwcanb .L_16_blocks_overflow_dBDAoEoFjhwcanb: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_dBDAoEoFjhwcanb: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm14,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_fdoxuvdoEsDrnFi: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fdoxuvdoEsDrnFi: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_fdoxuvdoEsDrnFi: jmp .L_last_blocks_done_CjljsCjaoxujvDg .L_last_num_blocks_is_0_CjljsCjaoxujvDg: vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 
1216(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CjljsCjaoxujvDg: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_keEetjmxflGqBfv .L_encrypt_32_blocks_keEetjmxflGqBfv: cmpb $240,%r15b jae .L_16_blocks_overflow_wovDjxgtezsaCbn vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_wovDjxgtezsaCbn .L_16_blocks_overflow_wovDjxgtezsaCbn: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_wovDjxgtezsaCbn: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) cmpb $240,%r15b jae .L_16_blocks_overflow_qraoeizxDFojkGy vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_qraoeizxDFojkGy .L_16_blocks_overflow_qraoeizxDFojkGy: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_qraoeizxDFojkGy: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1024(%rsp),%zmm8 vmovdqu64 256(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 320(%rsp),%zmm18 vmovdqa64 1088(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 384(%rsp),%zmm1 vmovdqa64 1152(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 448(%rsp),%zmm18 vmovdqa64 1216(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq 
$0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 256(%rcx,%rax,1),%zmm17 vmovdqu8 320(%rcx,%rax,1),%zmm19 vmovdqu8 384(%rcx,%rax,1),%zmm20 vmovdqu8 448(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm15,%zmm10,%zmm26 vpternlogq $0x96,%zmm12,%zmm6,%zmm24 vpternlogq $0x96,%zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,256(%r10,%rax,1) vmovdqu8 %zmm3,320(%r10,%rax,1) vmovdqu8 %zmm4,384(%r10,%rax,1) vmovdqu8 %zmm5,448(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,768(%rsp) vmovdqa64 %zmm3,832(%rsp) vmovdqa64 %zmm4,896(%rsp) vmovdqa64 %zmm5,960(%rsp) vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq 
%zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 subq $512,%r8 addq $512,%rax movl %r8d,%r10d andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_CafpBFgwEozfiCz cmpl $8,%r10d je .L_last_num_blocks_is_8_CafpBFgwEozfiCz jb .L_last_num_blocks_is_7_1_CafpBFgwEozfiCz cmpl $12,%r10d je .L_last_num_blocks_is_12_CafpBFgwEozfiCz jb .L_last_num_blocks_is_11_9_CafpBFgwEozfiCz cmpl $15,%r10d je .L_last_num_blocks_is_15_CafpBFgwEozfiCz ja .L_last_num_blocks_is_16_CafpBFgwEozfiCz cmpl $14,%r10d je .L_last_num_blocks_is_14_CafpBFgwEozfiCz jmp .L_last_num_blocks_is_13_CafpBFgwEozfiCz .L_last_num_blocks_is_11_9_CafpBFgwEozfiCz: cmpl $10,%r10d je .L_last_num_blocks_is_10_CafpBFgwEozfiCz ja .L_last_num_blocks_is_11_CafpBFgwEozfiCz jmp .L_last_num_blocks_is_9_CafpBFgwEozfiCz .L_last_num_blocks_is_7_1_CafpBFgwEozfiCz: cmpl $4,%r10d je .L_last_num_blocks_is_4_CafpBFgwEozfiCz jb .L_last_num_blocks_is_3_1_CafpBFgwEozfiCz cmpl $6,%r10d ja .L_last_num_blocks_is_7_CafpBFgwEozfiCz je .L_last_num_blocks_is_6_CafpBFgwEozfiCz jmp .L_last_num_blocks_is_5_CafpBFgwEozfiCz .L_last_num_blocks_is_3_1_CafpBFgwEozfiCz: cmpl $2,%r10d ja .L_last_num_blocks_is_3_CafpBFgwEozfiCz je .L_last_num_blocks_is_2_CafpBFgwEozfiCz .L_last_num_blocks_is_1_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_fekfutzigacvqDc vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_fekfutzigacvqDc .L_16_blocks_overflow_fekfutzigacvqDc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_fekfutzigacvqDc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq 
$0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_BqCzdBwrfgovfqg subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BqCzdBwrfgovfqg .L_small_initial_partial_block_BqCzdBwrfgovfqg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_BqCzdBwrfgovfqg .L_small_initial_compute_done_BqCzdBwrfgovfqg: .L_after_reduction_BqCzdBwrfgovfqg: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_2_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_zEwleqntmDxAeyd vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_zEwleqntmDxAeyd .L_16_blocks_overflow_zEwleqntmDxAeyd: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_zEwleqntmDxAeyd: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 
98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_eohifjbpuerrzyg subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_eohifjbpuerrzyg .L_small_initial_partial_block_eohifjbpuerrzyg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_eohifjbpuerrzyg: orq %r8,%r8 je 
.L_after_reduction_eohifjbpuerrzyg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_eohifjbpuerrzyg: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_3_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_acnffEtijrEjnxo vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_acnffEtijrEjnxo .L_16_blocks_overflow_acnffEtijrEjnxo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_acnffEtijrEjnxo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_doyzohBGtCkjnqc subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 
.byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_doyzohBGtCkjnqc .L_small_initial_partial_block_doyzohBGtCkjnqc: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_doyzohBGtCkjnqc: orq %r8,%r8 je .L_after_reduction_doyzohBGtCkjnqc vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_doyzohBGtCkjnqc: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_4_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_uGhvhwlitDofjoE vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_uGhvhwlitDofjoE .L_16_blocks_overflow_uGhvhwlitDofjoE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_uGhvhwlitDofjoE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 
.byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_CsCwmBEowahhzih subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_CsCwmBEowahhzih .L_small_initial_partial_block_CsCwmBEowahhzih: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_CsCwmBEowahhzih: orq %r8,%r8 je .L_after_reduction_CsCwmBEowahhzih vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_CsCwmBEowahhzih: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_5_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_BwnlahcxoBDAelu vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_BwnlahcxoBDAelu .L_16_blocks_overflow_BwnlahcxoBDAelu: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_BwnlahcxoBDAelu: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 
vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rBivbBgEnqzuoau subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rBivbBgEnqzuoau .L_small_initial_partial_block_rBivbBgEnqzuoau: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 
98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rBivbBgEnqzuoau: orq %r8,%r8 je .L_after_reduction_rBivbBgEnqzuoau vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rBivbBgEnqzuoau: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_6_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_ymfljrqweowoCvG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_ymfljrqweowoCvG .L_16_blocks_overflow_ymfljrqweowoCvG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_ymfljrqweowoCvG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 
192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Dlbqsuajgnhvlny subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Dlbqsuajgnhvlny .L_small_initial_partial_block_Dlbqsuajgnhvlny: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Dlbqsuajgnhvlny: orq %r8,%r8 je .L_after_reduction_Dlbqsuajgnhvlny vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Dlbqsuajgnhvlny: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_7_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_ijxrtlxzmzgCbiE vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_ijxrtlxzmzgCbiE .L_16_blocks_overflow_ijxrtlxzmzgCbiE: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb 
%zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_ijxrtlxzmzgCbiE: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Fpgnkfiyboaddsm subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Fpgnkfiyboaddsm .L_small_initial_partial_block_Fpgnkfiyboaddsm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Fpgnkfiyboaddsm: orq %r8,%r8 je .L_after_reduction_Fpgnkfiyboaddsm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Fpgnkfiyboaddsm: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_8_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_zzfxscwhyoakGqc vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_zzfxscwhyoakGqc .L_16_blocks_overflow_zzfxscwhyoakGqc: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_zzfxscwhyoakGqc: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xBdugFFrnyriCBE subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xBdugFFrnyriCBE .L_small_initial_partial_block_xBdugFFrnyriCBE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq 
%ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xBdugFFrnyriCBE: orq %r8,%r8 je .L_after_reduction_xBdugFFrnyriCBE vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xBdugFFrnyriCBE: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_9_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_hswtkcnEneBfnil vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_hswtkcnEneBfnil .L_16_blocks_overflow_hswtkcnEneBfnil: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_hswtkcnEneBfnil: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 
vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_lgbbvgiAttomlsy subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_lgbbvgiAttomlsy .L_small_initial_partial_block_lgbbvgiAttomlsy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_lgbbvgiAttomlsy: orq %r8,%r8 je .L_after_reduction_lgbbvgiAttomlsy vpxorq %xmm7,%xmm14,%xmm14 
.L_after_reduction_lgbbvgiAttomlsy: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_10_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_EBzDixsnrGlAsGi vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_EBzDixsnrGlAsGi .L_16_blocks_overflow_EBzDixsnrGlAsGi: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_EBzDixsnrGlAsGi: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xdsDxBzahxmzysb subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xdsDxBzahxmzysb .L_small_initial_partial_block_xdsDxBzahxmzysb: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xdsDxBzahxmzysb: orq %r8,%r8 je .L_after_reduction_xdsDxBzahxmzysb vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xdsDxBzahxmzysb: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_11_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_qyEwjvzrfEfrwlG 
vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_qyEwjvzrfEfrwlG .L_16_blocks_overflow_qyEwjvzrfEfrwlG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_qyEwjvzrfEfrwlG: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 
vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cmfyhuncjqoAhuh subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cmfyhuncjqoAhuh .L_small_initial_partial_block_cmfyhuncjqoAhuh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cmfyhuncjqoAhuh: orq %r8,%r8 je .L_after_reduction_cmfyhuncjqoAhuh vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cmfyhuncjqoAhuh: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_12_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_bcstjouersAefmz vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_bcstjouersAefmz .L_16_blocks_overflow_bcstjouersAefmz: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 
ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_bcstjouersAefmz: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iFhieggherswFAm subq $16,%r8 movl $0,(%rdx) 
vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iFhieggherswFAm .L_small_initial_partial_block_iFhieggherswFAm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iFhieggherswFAm: orq %r8,%r8 je .L_after_reduction_iFhieggherswFAm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iFhieggherswFAm: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_13_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_rymwDrficveEDaj vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_rymwDrficveEDaj .L_16_blocks_overflow_rymwDrficveEDaj: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 
.L_16_blocks_ok_rymwDrficveEDaj: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) 
vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aevCxqqBBnzfjmB subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aevCxqqBBnzfjmB .L_small_initial_partial_block_aevCxqqBBnzfjmB: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aevCxqqBBnzfjmB: orq %r8,%r8 je .L_after_reduction_aevCxqqBBnzfjmB vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aevCxqqBBnzfjmB: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_14_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae 
.L_16_blocks_overflow_kzfnwbigglfewrl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_kzfnwbigglfewrl .L_16_blocks_overflow_kzfnwbigglfewrl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_kzfnwbigglfewrl: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 
98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_BirmupqDcbxwtda subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_BirmupqDcbxwtda .L_small_initial_partial_block_BirmupqDcbxwtda: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 
$1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_BirmupqDcbxwtda: orq %r8,%r8 je .L_after_reduction_BirmupqDcbxwtda vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_BirmupqDcbxwtda: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_15_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_zpEbDAveGDqklle vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_zpEbDAveGDqklle .L_16_blocks_overflow_zpEbDAveGDqklle: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_zpEbDAveGDqklle: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 
98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_Djympovkdexblck subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_Djympovkdexblck .L_small_initial_partial_block_Djympovkdexblck: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 
98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_Djympovkdexblck: orq %r8,%r8 je .L_after_reduction_Djympovkdexblck vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_Djympovkdexblck: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_16_CafpBFgwEozfiCz: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_bjFGibBdktCEryt vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_bjFGibBdktCEryt .L_16_blocks_overflow_bjFGibBdktCEryt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_bjFGibBdktCEryt: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 
0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_pBmCpEpokBigCud: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 
vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pBmCpEpokBigCud: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_pBmCpEpokBigCud: jmp .L_last_blocks_done_CafpBFgwEozfiCz .L_last_num_blocks_is_0_CafpBFgwEozfiCz: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_CafpBFgwEozfiCz: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_keEetjmxflGqBfv .L_encrypt_16_blocks_keEetjmxflGqBfv: cmpb $240,%r15b jae .L_16_blocks_overflow_wmtckzeadccoCgk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_wmtckzeadccoCgk .L_16_blocks_overflow_wmtckzeadccoCgk: vpshufb %zmm29,%zmm2,%zmm2 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_wmtckzeadccoCgk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp),%zmm1 vshufi64x2 $255,%zmm5,%zmm5,%zmm2 addb $16,%r15b vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,243,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm6 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm15,%zmm10,%zmm26 vpxorq %zmm12,%zmm6,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1) vpshufb %zmm29,%zmm17,%zmm0 vpshufb %zmm29,%zmm19,%zmm3 vpshufb %zmm29,%zmm20,%zmm4 vpshufb %zmm29,%zmm21,%zmm5 vmovdqa64 %zmm0,1280(%rsp) vmovdqa64 %zmm3,1344(%rsp) vmovdqa64 %zmm4,1408(%rsp) vmovdqa64 %zmm5,1472(%rsp) vmovdqa64 1024(%rsp),%zmm13 vmovdqu64 256(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1088(%rsp),%zmm13 vmovdqu64 320(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1152(%rsp),%zmm13 vmovdqu64 384(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1216(%rsp),%zmm13 vmovdqu64 448(%rsp),%zmm12 .byte 
98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 subq $256,%r8 addq $256,%rax movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_adnhFzpjBkEnjlu cmpl $8,%r10d je .L_last_num_blocks_is_8_adnhFzpjBkEnjlu jb .L_last_num_blocks_is_7_1_adnhFzpjBkEnjlu cmpl $12,%r10d je .L_last_num_blocks_is_12_adnhFzpjBkEnjlu jb .L_last_num_blocks_is_11_9_adnhFzpjBkEnjlu cmpl $15,%r10d je .L_last_num_blocks_is_15_adnhFzpjBkEnjlu ja .L_last_num_blocks_is_16_adnhFzpjBkEnjlu cmpl $14,%r10d je .L_last_num_blocks_is_14_adnhFzpjBkEnjlu jmp .L_last_num_blocks_is_13_adnhFzpjBkEnjlu .L_last_num_blocks_is_11_9_adnhFzpjBkEnjlu: cmpl $10,%r10d je .L_last_num_blocks_is_10_adnhFzpjBkEnjlu ja .L_last_num_blocks_is_11_adnhFzpjBkEnjlu jmp .L_last_num_blocks_is_9_adnhFzpjBkEnjlu .L_last_num_blocks_is_7_1_adnhFzpjBkEnjlu: cmpl $4,%r10d je .L_last_num_blocks_is_4_adnhFzpjBkEnjlu jb .L_last_num_blocks_is_3_1_adnhFzpjBkEnjlu cmpl $6,%r10d ja .L_last_num_blocks_is_7_adnhFzpjBkEnjlu je .L_last_num_blocks_is_6_adnhFzpjBkEnjlu jmp .L_last_num_blocks_is_5_adnhFzpjBkEnjlu .L_last_num_blocks_is_3_1_adnhFzpjBkEnjlu: cmpl $2,%r10d ja .L_last_num_blocks_is_3_adnhFzpjBkEnjlu je .L_last_num_blocks_is_2_adnhFzpjBkEnjlu .L_last_num_blocks_is_1_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_GsxmuksbpmpGjAF vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_GsxmuksbpmpGjAF .L_16_blocks_overflow_GsxmuksbpmpGjAF: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_GsxmuksbpmpGjAF: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 
vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,8,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_fkgElsvknyCFraE subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_fkgElsvknyCFraE .L_small_initial_partial_block_fkgElsvknyCFraE: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_fkgElsvknyCFraE .L_small_initial_compute_done_fkgElsvknyCFraE: .L_after_reduction_fkgElsvknyCFraE: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_2_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_EjdqvCnEusieimt vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_EjdqvCnEusieimt .L_16_blocks_overflow_EjdqvCnEusieimt: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_EjdqvCnEusieimt: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq 
$0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,40,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rouAoqaCpdDxjzF subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rouAoqaCpdDxjzF .L_small_initial_partial_block_rouAoqaCpdDxjzF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 
98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rouAoqaCpdDxjzF: orq %r8,%r8 je .L_after_reduction_rouAoqaCpdDxjzF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_rouAoqaCpdDxjzF: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_3_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_uctbCqtlugkklDD vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_uctbCqtlugkklDD .L_16_blocks_overflow_uctbCqtlugkklDD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_uctbCqtlugkklDD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zkAAeakisCCFqgf subq 
$16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zkAAeakisCCFqgf .L_small_initial_partial_block_zkAAeakisCCFqgf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zkAAeakisCCFqgf: orq %r8,%r8 je .L_after_reduction_zkAAeakisCCFqgf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zkAAeakisCCFqgf: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_4_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_gaqeqvovBwleCnk vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_gaqeqvovBwleCnk .L_16_blocks_overflow_gaqeqvovBwleCnk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_gaqeqvovBwleCnk: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 
112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_qihCqAlqxdsjyzm subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_qihCqAlqxdsjyzm .L_small_initial_partial_block_qihCqAlqxdsjyzm: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_qihCqAlqxdsjyzm: orq %r8,%r8 je .L_after_reduction_qihCqAlqxdsjyzm vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_qihCqAlqxdsjyzm: jmp 
.L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_5_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_ocpzeCAdEaCuwqG vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_ocpzeCAdEaCuwqG .L_16_blocks_overflow_ocpzeCAdEaCuwqG: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_ocpzeCAdEaCuwqG: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 
98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GgfcCeubxmwGabf subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GgfcCeubxmwGabf .L_small_initial_partial_block_GgfcCeubxmwGabf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GgfcCeubxmwGabf: orq %r8,%r8 je .L_after_reduction_GgfcCeubxmwGabf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GgfcCeubxmwGabf: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_6_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_tCpEhfGhdbguevv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_tCpEhfGhdbguevv .L_16_blocks_overflow_tCpEhfGhdbguevv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_tCpEhfGhdbguevv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 
1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EcehrEgDvGgGxlr subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 
vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EcehrEgDvGgGxlr .L_small_initial_partial_block_EcehrEgDvGgGxlr: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EcehrEgDvGgGxlr: orq %r8,%r8 je .L_after_reduction_EcehrEgDvGgGxlr vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EcehrEgDvGgGxlr: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_7_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_yhnxntsqCvqmnAv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_yhnxntsqCvqmnAv .L_16_blocks_overflow_yhnxntsqCvqmnAv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_yhnxntsqCvqmnAv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 
96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dpnDdmEjpiBlsff subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dpnDdmEjpiBlsff .L_small_initial_partial_block_dpnDdmEjpiBlsff: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 
224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dpnDdmEjpiBlsff: orq %r8,%r8 je .L_after_reduction_dpnDdmEjpiBlsff vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dpnDdmEjpiBlsff: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_8_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_qhecknjsAigbdvl vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_qhecknjsAigbdvl .L_16_blocks_overflow_qhecknjsAigbdvl: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_qhecknjsAigbdvl: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_agskGinasntEiCl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_agskGinasntEiCl .L_small_initial_partial_block_agskGinasntEiCl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq 
$4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_agskGinasntEiCl: orq %r8,%r8 je .L_after_reduction_agskGinasntEiCl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_agskGinasntEiCl: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_9_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_bFfGEAqbwowecqr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_bFfGEAqbwowecqr .L_16_blocks_overflow_bFfGEAqbwowecqr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_bFfGEAqbwowecqr: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 
$1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xuljsjGkGjfAtFa subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xuljsjGkGjfAtFa .L_small_initial_partial_block_xuljsjGkGjfAtFa: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 
vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xuljsjGkGjfAtFa: orq %r8,%r8 je .L_after_reduction_xuljsjGkGjfAtFa vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xuljsjGkGjfAtFa: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_10_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_rvpoAkotkmdfoGD vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_rvpoAkotkmdfoGD .L_16_blocks_overflow_rvpoAkotkmdfoGD: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_rvpoAkotkmdfoGD: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 
$1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_xvxthCnBgzxznFe subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_xvxthCnBgzxznFe .L_small_initial_partial_block_xvxthCnBgzxznFe: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 
vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xvxthCnBgzxznFe: orq %r8,%r8 je .L_after_reduction_xvxthCnBgzxznFe vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xvxthCnBgzxznFe: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_11_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_vfjpDwaAwwnfAie vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_vfjpDwaAwwnfAie .L_16_blocks_overflow_vfjpDwaAwwnfAie: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_vfjpDwaAwwnfAie: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_aBbqBjAzrxyDsyu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_aBbqBjAzrxyDsyu .L_small_initial_partial_block_aBbqBjAzrxyDsyu: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 
vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_aBbqBjAzrxyDsyu: orq %r8,%r8 je .L_after_reduction_aBbqBjAzrxyDsyu vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_aBbqBjAzrxyDsyu: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_12_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_sxuCEDavBFjsEdv vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_sxuCEDavBFjsEdv .L_16_blocks_overflow_sxuCEDavBFjsEdv: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_sxuCEDavBFjsEdv: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq 
$0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yChicojCCAAFCdn subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yChicojCCAAFCdn .L_small_initial_partial_block_yChicojCCAAFCdn: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 
98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yChicojCCAAFCdn: orq %r8,%r8 je .L_after_reduction_yChicojCCAAFCdn vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yChicojCCAAFCdn: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_13_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_qqAerGvEyeduCgs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_qqAerGvEyeduCgs .L_16_blocks_overflow_qqAerGvEyeduCgs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_qqAerGvEyeduCgs: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 
96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zkhmCnldAfcumwl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 
240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zkhmCnldAfcumwl .L_small_initial_partial_block_zkhmCnldAfcumwl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zkhmCnldAfcumwl: orq %r8,%r8 je .L_after_reduction_zkhmCnldAfcumwl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zkhmCnldAfcumwl: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_14_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_tiwfklfdCbEnvFe vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_tiwfklfdCbEnvFe .L_16_blocks_overflow_tiwfklfdCbEnvFe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_tiwfklfdCbEnvFe: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 
640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 
%zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_fFfrqpdqbcvGzmv subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_fFfrqpdqbcvGzmv .L_small_initial_partial_block_fFfrqpdqbcvGzmv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fFfrqpdqbcvGzmv: orq %r8,%r8 je .L_after_reduction_fFfrqpdqbcvGzmv vpxorq 
%xmm7,%xmm14,%xmm14 .L_after_reduction_fFfrqpdqbcvGzmv: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_15_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_BatgsGhBnhqnqnx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_BatgsGhBnhqnqnx .L_16_blocks_overflow_BatgsGhBnhqnqnx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_BatgsGhBnhqnqnx: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 
$1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ysbBlvhzxEdeEFl subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ysbBlvhzxEdeEFl .L_small_initial_partial_block_ysbBlvhzxEdeEFl: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 
.byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ysbBlvhzxEdeEFl: orq %r8,%r8 je .L_after_reduction_ysbBlvhzxEdeEFl vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_ysbBlvhzxEdeEFl: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_16_adnhFzpjBkEnjlu: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_CuxvqEazAfGjsCp vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_CuxvqEazAfGjsCp .L_16_blocks_overflow_CuxvqEazAfGjsCp: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_CuxvqEazAfGjsCp: vbroadcastf64x2 0(%rdi),%zmm30 vmovdqa64 1280(%rsp),%zmm8 vmovdqu64 512(%rsp),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 576(%rsp),%zmm18 vmovdqa64 1344(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 640(%rsp),%zmm1 vmovdqa64 1408(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 704(%rsp),%zmm18 vmovdqa64 1472(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpternlogq $0x96,%zmm12,%zmm24,%zmm14 vpternlogq $0x96,%zmm13,%zmm25,%zmm7 vpternlogq $0x96,%zmm15,%zmm26,%zmm10 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 vpsrldq $8,%zmm10,%zmm15 vpslldq $8,%zmm10,%zmm10 vmovdqa64 POLY2(%rip),%xmm16 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 vpxorq %zmm15,%zmm14,%zmm14 vpxorq %zmm10,%zmm7,%zmm7 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vextracti64x4 $1,%zmm14,%ymm12 vpxorq %ymm12,%ymm14,%ymm14 vextracti32x4 $1,%ymm14,%xmm12 vpxorq %xmm12,%xmm14,%xmm14 vextracti64x4 $1,%zmm7,%ymm13 vpxorq %ymm13,%ymm7,%ymm7 vextracti32x4 $1,%ymm7,%xmm13 vpxorq %xmm13,%xmm7,%xmm7 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,115,125,0,68,239,1 vpslldq $8,%xmm13,%xmm13 vpxorq %xmm13,%xmm7,%xmm13 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,83,125,0,68,229,0 vpsrldq $4,%xmm12,%xmm12 .byte 98,83,125,0,68,253,16 vpslldq $4,%xmm15,%xmm15 vpternlogq $0x96,%xmm12,%xmm15,%xmm14 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_cGvBxlvhpkhxlhv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpxorq %zmm14,%zmm17,%zmm17 vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq 
$0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm31,%zmm5,%zmm5 vpxorq %zmm8,%zmm0,%zmm0 vpxorq %zmm22,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cGvBxlvhpkhxlhv: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_cGvBxlvhpkhxlhv: jmp .L_last_blocks_done_adnhFzpjBkEnjlu .L_last_num_blocks_is_0_adnhFzpjBkEnjlu: vmovdqa64 1280(%rsp),%zmm13 vmovdqu64 512(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1344(%rsp),%zmm13 vmovdqu64 576(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 1408(%rsp),%zmm13 vmovdqu64 640(%rsp),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 1472(%rsp),%zmm13 vmovdqu64 704(%rsp),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_adnhFzpjBkEnjlu: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_keEetjmxflGqBfv .L_message_below_32_blocks_keEetjmxflGqBfv: subq $256,%r8 addq $256,%rax movl %r8d,%r10d leaq 80(%rsi),%r12 testq %r14,%r14 jnz .L_skip_hkeys_precomputation_wDAhpcxxDdecsFn vmovdqu64 640(%rsp),%zmm3 vshufi64x2 $0x00,%zmm3,%zmm3,%zmm3 vmovdqu64 576(%rsp),%zmm4 vmovdqu64 512(%rsp),%zmm5 .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 
%zmm4,448(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,384(%rsp) .byte 98,243,93,72,68,243,17 .byte 98,243,93,72,68,251,0 .byte 98,115,93,72,68,211,1 .byte 98,243,93,72,68,227,16 vpxorq %zmm10,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm10 vpslldq $8,%zmm4,%zmm4 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm4,%zmm4 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,252,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm4,%zmm4 .byte 98,243,45,72,68,252,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,228,16 vpslldq $4,%zmm4,%zmm4 vpternlogq $0x96,%zmm7,%zmm6,%zmm4 vmovdqu64 %zmm4,320(%rsp) .byte 98,243,85,72,68,243,17 .byte 98,243,85,72,68,251,0 .byte 98,115,85,72,68,211,1 .byte 98,243,85,72,68,235,16 vpxorq %zmm10,%zmm5,%zmm5 vpsrldq $8,%zmm5,%zmm10 vpslldq $8,%zmm5,%zmm5 vpxorq %zmm10,%zmm6,%zmm6 vpxorq %zmm7,%zmm5,%zmm5 vmovdqu64 POLY2(%rip),%zmm10 .byte 98,243,45,72,68,253,1 vpslldq $8,%zmm7,%zmm7 vpxorq %zmm7,%zmm5,%zmm5 .byte 98,243,45,72,68,253,0 vpsrldq $4,%zmm7,%zmm7 .byte 98,243,45,72,68,237,16 vpslldq $4,%zmm5,%zmm5 vpternlogq $0x96,%zmm7,%zmm6,%zmm5 vmovdqu64 %zmm5,256(%rsp) .L_skip_hkeys_precomputation_wDAhpcxxDdecsFn: movq $1,%r14 andl $~15,%r10d movl $512,%ebx subl %r10d,%ebx movl %r8d,%r10d addl $15,%r10d shrl $4,%r10d je .L_last_num_blocks_is_0_uGbbotznadbtwnB cmpl $8,%r10d je .L_last_num_blocks_is_8_uGbbotznadbtwnB jb .L_last_num_blocks_is_7_1_uGbbotznadbtwnB cmpl $12,%r10d je .L_last_num_blocks_is_12_uGbbotznadbtwnB jb .L_last_num_blocks_is_11_9_uGbbotznadbtwnB cmpl $15,%r10d je .L_last_num_blocks_is_15_uGbbotznadbtwnB ja .L_last_num_blocks_is_16_uGbbotznadbtwnB cmpl $14,%r10d je .L_last_num_blocks_is_14_uGbbotznadbtwnB jmp .L_last_num_blocks_is_13_uGbbotznadbtwnB .L_last_num_blocks_is_11_9_uGbbotznadbtwnB: cmpl $10,%r10d je .L_last_num_blocks_is_10_uGbbotznadbtwnB ja .L_last_num_blocks_is_11_uGbbotznadbtwnB jmp .L_last_num_blocks_is_9_uGbbotznadbtwnB .L_last_num_blocks_is_7_1_uGbbotznadbtwnB: cmpl $4,%r10d je .L_last_num_blocks_is_4_uGbbotznadbtwnB jb .L_last_num_blocks_is_3_1_uGbbotznadbtwnB cmpl $6,%r10d ja .L_last_num_blocks_is_7_uGbbotznadbtwnB je .L_last_num_blocks_is_6_uGbbotznadbtwnB jmp .L_last_num_blocks_is_5_uGbbotznadbtwnB .L_last_num_blocks_is_3_1_uGbbotznadbtwnB: cmpl $2,%r10d ja .L_last_num_blocks_is_3_uGbbotznadbtwnB je .L_last_num_blocks_is_2_uGbbotznadbtwnB .L_last_num_blocks_is_1_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $255,%r15d jae .L_16_blocks_overflow_cChwjnmCkfzrqax vpaddd %xmm28,%xmm2,%xmm0 jmp .L_16_blocks_ok_cChwjnmCkfzrqax .L_16_blocks_overflow_cChwjnmCkfzrqax: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %xmm29,%xmm0,%xmm0 .L_16_blocks_ok_cChwjnmCkfzrqax: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %xmm30,%xmm0,%xmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 
98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,8,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,8,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%xmm17{%k1}{z} .byte 98,146,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,8,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,8,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,8,220,199 .byte 98,146,125,8,221,198 vpxorq %xmm17,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %xmm29,%xmm17,%xmm17 vextracti32x4 $0,%zmm17,%xmm7 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_EctpihchBbzjuhh subq $16,%r8 movl $0,(%rdx) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EctpihchBbzjuhh .L_small_initial_partial_block_EctpihchBbzjuhh: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm0 .byte 98,147,125,8,68,217,1 vpslldq $8,%xmm3,%xmm3 vpxorq %xmm3,%xmm25,%xmm3 .byte 98,243,125,8,68,227,0 vpsrldq $4,%xmm4,%xmm4 .byte 98,115,125,8,68,243,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm4,%xmm14 vpxorq %xmm7,%xmm14,%xmm14 jmp .L_after_reduction_EctpihchBbzjuhh .L_small_initial_compute_done_EctpihchBbzjuhh: 
.L_after_reduction_EctpihchBbzjuhh: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_2_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $254,%r15d jae .L_16_blocks_overflow_DqmtBvrcgAqmgdw vpaddd %ymm28,%ymm2,%ymm0 jmp .L_16_blocks_ok_DqmtBvrcgAqmgdw .L_16_blocks_overflow_DqmtBvrcgAqmgdw: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %ymm29,%ymm0,%ymm0 .L_16_blocks_ok_DqmtBvrcgAqmgdw: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %ymm30,%ymm0,%ymm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,40,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,40,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%ymm17{%k1}{z} .byte 98,146,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,40,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,40,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,40,220,199 .byte 98,146,125,40,221,198 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %ymm29,%ymm17,%ymm17 vextracti32x4 $1,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EyrkoajdjakxzkF subq $16,%r8 movl $0,(%rdx) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq 
$4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EyrkoajdjakxzkF .L_small_initial_partial_block_EyrkoajdjakxzkF: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 240(%r10),%xmm1 .byte 98,243,117,0,68,225,1 .byte 98,243,117,0,68,233,16 .byte 98,243,117,0,68,193,17 .byte 98,243,117,0,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EyrkoajdjakxzkF: orq %r8,%r8 je .L_after_reduction_EyrkoajdjakxzkF vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EyrkoajdjakxzkF: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_3_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $253,%r15d jae .L_16_blocks_overflow_hfDuCGGGEpbgAAo vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_hfDuCGGGEpbgAAo .L_16_blocks_overflow_hfDuCGGGEpbgAAo: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_hfDuCGGGEpbgAAo: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 
.byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $2,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GfsyFzqqokxFwFx subq $16,%r8 movl $0,(%rdx) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GfsyFzqqokxFwFx .L_small_initial_partial_block_GfsyFzqqokxFwFx: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 224(%r10),%ymm1 .byte 98,243,117,32,68,225,1 .byte 98,243,117,32,68,233,16 .byte 98,243,117,32,68,193,17 .byte 98,243,117,32,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GfsyFzqqokxFwFx: orq %r8,%r8 je .L_after_reduction_GfsyFzqqokxFwFx vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_GfsyFzqqokxFwFx: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_4_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 kmovq (%r10,%r11,8),%k1 cmpl $252,%r15d jae .L_16_blocks_overflow_wxaujbwbDrFxuhe vpaddd %zmm28,%zmm2,%zmm0 jmp .L_16_blocks_ok_wxaujbwbDrFxuhe .L_16_blocks_overflow_wxaujbwbDrFxuhe: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpshufb %zmm29,%zmm0,%zmm0 .L_16_blocks_ok_wxaujbwbDrFxuhe: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm0,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 vbroadcastf64x2 64(%rdi),%zmm30 .byte 
98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17{%k1}{z} .byte 98,146,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,125,72,221,198 vpxorq %zmm17,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm17,%zmm17{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vextracti32x4 $3,%zmm17,%xmm7 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_tpadbsuBdepEgig subq $16,%r8 movl $0,(%rdx) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_tpadbsuBdepEgig .L_small_initial_partial_block_tpadbsuBdepEgig: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 vpxorq %zmm26,%zmm4,%zmm4 vpxorq %zmm24,%zmm0,%zmm0 vpxorq %zmm25,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_tpadbsuBdepEgig: orq %r8,%r8 je .L_after_reduction_tpadbsuBdepEgig vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_tpadbsuBdepEgig: jmp .L_last_blocks_done_uGbbotznadbtwnB 
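/*
 * The .L_last_num_blocks_is_N branches below appear to handle the tail of the
 * AVX-512 AES-GCM bulk loop, one branch per possible count of remaining
 * 16-byte blocks. Each branch looks up a lane mask in
 * byte64_len_to_mask_table (kmovq into %k1), runs the AES rounds on the final
 * counter blocks (the .byte 98,... sequences are EVEX encodings of
 * vaesenc/vaesenclast and vpclmulqdq, presumably emitted as raw bytes for
 * assemblers without VAES/VPCLMULQDQ support), XORs the keystream into the
 * masked data via vmovdqu8 {%k1}, folds the byte-reflected blocks into the
 * GHASH accumulators, and reduces the result using the POLY2 constant.
 */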
.L_last_num_blocks_is_5_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $251,%r15d jae .L_16_blocks_overflow_tEuoxeaCCDdhEFB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %xmm27,%xmm0,%xmm3 jmp .L_16_blocks_ok_tEuoxeaCCDdhEFB .L_16_blocks_overflow_tEuoxeaCCDdhEFB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 .L_16_blocks_ok_tEuoxeaCCDdhEFB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %xmm30,%xmm3,%xmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%xmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,8,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,8,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,8,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %xmm19,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %xmm29,%xmm19,%xmm19 vextracti32x4 $0,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_srvwxdEwmxFwfhg subq $16,%r8 movl $0,(%rdx) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 
98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_srvwxdEwmxFwfhg .L_small_initial_partial_block_srvwxdEwmxFwfhg: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 192(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_srvwxdEwmxFwfhg: orq %r8,%r8 je .L_after_reduction_srvwxdEwmxFwfhg vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_srvwxdEwmxFwfhg: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_6_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $250,%r15d jae .L_16_blocks_overflow_prosxFkubabgvzg vpaddd %zmm28,%zmm2,%zmm0 vpaddd %ymm27,%ymm0,%ymm3 jmp .L_16_blocks_ok_prosxFkubabgvzg .L_16_blocks_overflow_prosxFkubabgvzg: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 .L_16_blocks_ok_prosxFkubabgvzg: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %ymm30,%ymm3,%ymm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm31 
vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%ymm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,40,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,40,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,40,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %ymm29,%ymm19,%ymm19 vextracti32x4 $1,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hzkBexFaxhsbChs subq $16,%r8 movl $0,(%rdx) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_hzkBexFaxhsbChs .L_small_initial_partial_block_hzkBexFaxhsbChs: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 176(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 240(%r10),%xmm1 .byte 98,243,101,0,68,225,1 .byte 98,243,101,0,68,233,16 .byte 98,243,101,0,68,193,17 .byte 98,243,101,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 
$1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hzkBexFaxhsbChs: orq %r8,%r8 je .L_after_reduction_hzkBexFaxhsbChs vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_hzkBexFaxhsbChs: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_7_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $249,%r15d jae .L_16_blocks_overflow_aeeqyBehlbvnfnk vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_aeeqyBehlbvnfnk .L_16_blocks_overflow_aeeqyBehlbvnfnk: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_aeeqyBehlbvnfnk: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq 
%zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $2,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_AobapGtdiluagwq subq $16,%r8 movl $0,(%rdx) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_AobapGtdiluagwq .L_small_initial_partial_block_AobapGtdiluagwq: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 160(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 224(%r10),%ymm1 .byte 98,243,101,32,68,225,1 .byte 98,243,101,32,68,233,16 .byte 98,243,101,32,68,193,17 .byte 98,243,101,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_AobapGtdiluagwq: orq %r8,%r8 je .L_after_reduction_AobapGtdiluagwq vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_AobapGtdiluagwq: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_8_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $64,%r11 kmovq (%r10,%r11,8),%k1 cmpl $248,%r15d jae .L_16_blocks_overflow_rboylvBCxohyFxr vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 jmp .L_16_blocks_ok_rboylvBCxohyFxr .L_16_blocks_overflow_rboylvBCxohyFxr: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 .L_16_blocks_ok_rboylvBCxohyFxr: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm3,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 
64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm19,%zmm19{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vextracti32x4 $3,%zmm19,%xmm7 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_iitjdnjexGtAzlA subq $16,%r8 movl $0,(%rdx) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_iitjdnjexGtAzlA .L_small_initial_partial_block_iitjdnjexGtAzlA: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 144(%r10),%zmm1 .byte 98,115,117,64,68,193,17 .byte 98,227,117,64,68,241,0 .byte 98,99,117,64,68,241,1 .byte 98,99,117,64,68,249,16 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,101,64,68,225,1 .byte 98,243,101,64,68,233,16 .byte 98,243,101,64,68,193,17 .byte 98,243,101,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_iitjdnjexGtAzlA: orq %r8,%r8 je .L_after_reduction_iitjdnjexGtAzlA vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_iitjdnjexGtAzlA: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_9_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $247,%r15d jae .L_16_blocks_overflow_kwzbcrnlszssDoA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %xmm27,%xmm3,%xmm4 jmp .L_16_blocks_ok_kwzbcrnlszssDoA .L_16_blocks_overflow_kwzbcrnlszssDoA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 .L_16_blocks_ok_kwzbcrnlszssDoA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %xmm30,%xmm4,%xmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 
vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%xmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,8,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,8,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,8,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %xmm20,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %xmm29,%xmm20,%xmm20 vextracti32x4 $0,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_EFvfhGiioywrajC subq $16,%r8 movl $0,(%rdx) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_EFvfhGiioywrajC .L_small_initial_partial_block_EFvfhGiioywrajC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 128(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 192(%r10),%zmm1 .byte 98,115,101,64,68,193,17 
.byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_EFvfhGiioywrajC: orq %r8,%r8 je .L_after_reduction_EFvfhGiioywrajC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_EFvfhGiioywrajC: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_10_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $246,%r15d jae .L_16_blocks_overflow_hrbjfpBdCjiGnfs vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %ymm27,%ymm3,%ymm4 jmp .L_16_blocks_ok_hrbjfpBdCjiGnfs .L_16_blocks_overflow_hrbjfpBdCjiGnfs: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 .L_16_blocks_ok_hrbjfpBdCjiGnfs: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %ymm30,%ymm4,%ymm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%ymm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 128(%rdi),%zmm30 
vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,40,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,40,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,40,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %ymm20,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %ymm29,%ymm20,%ymm20 vextracti32x4 $1,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FvsGiljtuwAeGuy subq $16,%r8 movl $0,(%rdx) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FvsGiljtuwAeGuy .L_small_initial_partial_block_FvsGiljtuwAeGuy: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 112(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 176(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,93,0,68,225,1 .byte 98,243,93,0,68,233,16 .byte 98,243,93,0,68,193,17 .byte 98,243,93,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 
vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FvsGiljtuwAeGuy: orq %r8,%r8 je .L_after_reduction_FvsGiljtuwAeGuy vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_FvsGiljtuwAeGuy: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_11_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $245,%r15d jae .L_16_blocks_overflow_gffyuiCaEymxbgx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_gffyuiCaEymxbgx .L_16_blocks_overflow_gffyuiCaEymxbgx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_gffyuiCaEymxbgx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 
98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $2,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yskGpojraEjuoeD subq $16,%r8 movl $0,(%rdx) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yskGpojraEjuoeD .L_small_initial_partial_block_yskGpojraEjuoeD: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 96(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 160(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,93,32,68,225,1 .byte 98,243,93,32,68,233,16 .byte 98,243,93,32,68,193,17 .byte 98,243,93,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq 
%xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yskGpojraEjuoeD: orq %r8,%r8 je .L_after_reduction_yskGpojraEjuoeD vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_yskGpojraEjuoeD: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_12_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $128,%r11 kmovq (%r10,%r11,8),%k1 cmpl $244,%r15d jae .L_16_blocks_overflow_hAjEfcezvfywBbB vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 jmp .L_16_blocks_ok_hAjEfcezvfywBbB .L_16_blocks_overflow_hAjEfcezvfywBbB: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 .L_16_blocks_ok_hAjEfcezvfywBbB: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm4,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm31 .byte 
98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm20,%zmm20{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vextracti32x4 $3,%zmm20,%xmm7 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_dkgezBnEGtEnaGC subq $16,%r8 movl $0,(%rdx) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq %zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_dkgezBnEGtEnaGC .L_small_initial_partial_block_dkgezBnEGtEnaGC: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 80(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 144(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vpxorq %zmm8,%zmm0,%zmm8 vpxorq %zmm22,%zmm3,%zmm22 vpxorq %zmm30,%zmm4,%zmm30 vpxorq %zmm31,%zmm5,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,93,64,68,225,1 .byte 98,243,93,64,68,233,16 .byte 98,243,93,64,68,193,17 .byte 98,243,93,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq 
%xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_dkgezBnEGtEnaGC: orq %r8,%r8 je .L_after_reduction_dkgezBnEGtEnaGC vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_dkgezBnEGtEnaGC: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_13_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $243,%r15d jae .L_16_blocks_overflow_jsBqmgCzCrGvyyA vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %xmm27,%xmm4,%xmm5 jmp .L_16_blocks_ok_jsBqmgCzCrGvyyA .L_16_blocks_overflow_jsBqmgCzCrGvyyA: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 .L_16_blocks_ok_jsBqmgCzCrGvyyA: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $0,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %xmm30,%xmm5,%xmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%xmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 
98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,8,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,8,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,8,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %xmm21,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %xmm29,%xmm21,%xmm21 vextracti32x4 $0,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_gEgFBvntjyjbGji subq $16,%r8 movl $0,(%rdx) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_gEgFBvntjyjbGji .L_small_initial_partial_block_gEgFBvntjyjbGji: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 64(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 128(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 192(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vpxorq %zmm26,%zmm30,%zmm30 vpxorq 
%zmm24,%zmm8,%zmm8 vpxorq %zmm25,%zmm22,%zmm22 vpxorq %zmm31,%zmm30,%zmm30 vpsrldq $8,%zmm30,%zmm4 vpslldq $8,%zmm30,%zmm5 vpxorq %zmm4,%zmm8,%zmm0 vpxorq %zmm5,%zmm22,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_gEgFBvntjyjbGji: orq %r8,%r8 je .L_after_reduction_gEgFBvntjyjbGji vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_gEgFBvntjyjbGji: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_14_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $242,%r15d jae .L_16_blocks_overflow_muGuwhaFlxCtAii vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %ymm27,%ymm4,%ymm5 jmp .L_16_blocks_ok_muGuwhaFlxCtAii .L_16_blocks_overflow_muGuwhaFlxCtAii: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 .L_16_blocks_ok_muGuwhaFlxCtAii: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $1,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %ymm30,%ymm5,%ymm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%ymm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 
98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,40,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,40,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,40,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %ymm21,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %ymm29,%ymm21,%ymm21 vextracti32x4 $1,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zumDfmCofGawimf subq $16,%r8 movl $0,(%rdx) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zumDfmCofGawimf .L_small_initial_partial_block_zumDfmCofGawimf: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 48(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 
98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 112(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 176(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 240(%r10),%xmm1 .byte 98,243,85,0,68,225,1 .byte 98,243,85,0,68,233,16 .byte 98,243,85,0,68,193,17 .byte 98,243,85,0,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zumDfmCofGawimf: orq %r8,%r8 je .L_after_reduction_zumDfmCofGawimf vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_zumDfmCofGawimf: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_15_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $241,%r15d jae .L_16_blocks_overflow_EpbiipkiGBkrvEx vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_EpbiipkiGBkrvEx .L_16_blocks_overflow_EpbiipkiGBkrvEx: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_EpbiipkiGBkrvEx: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $2,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq 
$0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $2,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_wohgmjgdAjDrcfv subq $16,%r8 movl $0,(%rdx) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 
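/*
 * The ".byte 98,..." sequences in this routine appear to be hand-encoded
 * EVEX instructions (0x62 == 98 is the EVEX prefix; opcodes 0xDC/0xDD ==
 * 220/221 correspond to vaesenc/vaesenclast and 0x44 == 68 to vpclmulqdq),
 * likely emitted as raw bytes so the file still assembles with toolchains
 * that do not know VAES / VPCLMULQDQ on ZMM registers.  The surrounding
 * vpxorq/vpsrldq/vpslldq chain looks like the usual GHASH fold of the
 * 512-bit accumulators down to 128 bits, followed by a reduction modulo
 * the GHASH polynomial using the POLY2 constant.
 */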
vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_wohgmjgdAjDrcfv .L_small_initial_partial_block_wohgmjgdAjDrcfv: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 32(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 96(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 98,99,101,64,68,249,16 vmovdqu64 160(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 224(%r10),%ymm1 .byte 98,243,85,32,68,225,1 .byte 98,243,85,32,68,233,16 .byte 98,243,85,32,68,193,17 .byte 98,243,85,32,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_wohgmjgdAjDrcfv: orq %r8,%r8 je .L_after_reduction_wohgmjgdAjDrcfv vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_wohgmjgdAjDrcfv: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_16_uGbbotznadbtwnB: leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r11 subq $192,%r11 kmovq (%r10,%r11,8),%k1 cmpl $240,%r15d jae .L_16_blocks_overflow_etuAklrEovqCDpq vpaddd %zmm28,%zmm2,%zmm0 vpaddd %zmm27,%zmm0,%zmm3 vpaddd %zmm27,%zmm3,%zmm4 vpaddd %zmm27,%zmm4,%zmm5 jmp .L_16_blocks_ok_etuAklrEovqCDpq .L_16_blocks_overflow_etuAklrEovqCDpq: vpshufb %zmm29,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vmovdqa64 ddq_add_4444(%rip),%zmm5 vpaddd %zmm5,%zmm0,%zmm3 vpaddd %zmm5,%zmm3,%zmm4 vpaddd %zmm5,%zmm4,%zmm5 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 .L_16_blocks_ok_etuAklrEovqCDpq: vbroadcastf64x2 0(%rdi),%zmm30 vpxorq 768(%rsp),%zmm14,%zmm8 vmovdqu64 0(%rsp,%rbx,1),%zmm1 vextracti32x4 $3,%zmm5,%xmm2 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vbroadcastf64x2 16(%rdi),%zmm31 vmovdqu64 64(%rsp,%rbx,1),%zmm18 vmovdqa64 832(%rsp),%zmm22 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm30,%zmm3,%zmm3 vpxorq %zmm30,%zmm4,%zmm4 vpxorq %zmm30,%zmm5,%zmm5 vbroadcastf64x2 32(%rdi),%zmm30 .byte 98,115,61,72,68,241,17 .byte 98,243,61,72,68,249,0 .byte 98,115,61,72,68,209,1 .byte 98,115,61,72,68,217,16 vmovdqu64 128(%rsp,%rbx,1),%zmm1 vmovdqa64 896(%rsp),%zmm8 .byte 98,146,125,72,220,199 .byte 
98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 vmovdqu64 192(%rsp,%rbx,1),%zmm18 vmovdqa64 960(%rsp),%zmm22 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 64(%rdi),%zmm30 .byte 98,227,61,72,68,225,16 .byte 98,227,61,72,68,233,1 .byte 98,227,61,72,68,201,17 .byte 98,227,61,72,68,217,0 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm31 vpternlogq $0x96,%zmm17,%zmm12,%zmm14 vpternlogq $0x96,%zmm19,%zmm13,%zmm7 vpternlogq $0x96,%zmm21,%zmm16,%zmm11 vpternlogq $0x96,%zmm20,%zmm15,%zmm10 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 96(%rdi),%zmm30 vmovdqu8 0(%rcx,%rax,1),%zmm17 vmovdqu8 64(%rcx,%rax,1),%zmm19 vmovdqu8 128(%rcx,%rax,1),%zmm20 vmovdqu8 192(%rcx,%rax,1),%zmm21{%k1}{z} .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm31 .byte 98,51,77,64,68,250,16 .byte 98,163,77,64,68,194,1 .byte 98,51,77,64,68,226,17 .byte 98,51,77,64,68,234,0 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 128(%rdi),%zmm30 vpternlogq $0x96,%zmm16,%zmm11,%zmm10 vpxorq %zmm12,%zmm14,%zmm24 vpxorq %zmm13,%zmm7,%zmm25 vpxorq %zmm15,%zmm10,%zmm26 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 160(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 192(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm31 .byte 98,146,125,72,220,198 .byte 98,146,101,72,220,222 .byte 98,146,93,72,220,230 .byte 98,146,85,72,220,238 vbroadcastf64x2 224(%rdi),%zmm30 .byte 98,146,125,72,220,199 .byte 98,146,101,72,220,223 .byte 98,146,93,72,220,231 .byte 98,146,85,72,220,239 .byte 98,146,125,72,221,198 .byte 98,146,101,72,221,222 .byte 98,146,93,72,221,230 .byte 98,146,85,72,221,238 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vpxorq %zmm20,%zmm4,%zmm4 vpxorq %zmm21,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm11 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm21,%zmm21{%k1}{z} vpshufb %zmm29,%zmm17,%zmm17 vpshufb %zmm29,%zmm19,%zmm19 vpshufb %zmm29,%zmm20,%zmm20 vpshufb %zmm29,%zmm21,%zmm21 vextracti32x4 $3,%zmm21,%xmm7 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_xGuCpnrvibyoyay: movl %r8d,(%rdx) vmovdqu64 %xmm11,16(%rsi) vmovdqu64 16(%r10),%zmm1 .byte 98,243,117,64,68,193,17 .byte 98,243,117,64,68,217,0 .byte 98,243,117,64,68,225,1 .byte 98,243,117,64,68,233,16 vmovdqu64 80(%r10),%zmm1 .byte 98,115,101,64,68,193,17 .byte 98,227,101,64,68,241,0 .byte 98,99,101,64,68,241,1 .byte 
98,99,101,64,68,249,16 vmovdqu64 144(%r10),%zmm1 .byte 98,227,93,64,68,201,17 .byte 98,227,93,64,68,217,0 vpternlogq $0x96,%zmm0,%zmm17,%zmm8 vpternlogq $0x96,%zmm3,%zmm19,%zmm22 .byte 98,227,93,64,68,201,1 .byte 98,227,93,64,68,217,16 vpternlogq $0x96,%zmm4,%zmm17,%zmm30 vpternlogq $0x96,%zmm5,%zmm19,%zmm31 vmovdqu64 208(%r10),%ymm1 vinserti64x2 $2,240(%r10),%zmm1,%zmm1 .byte 98,243,85,64,68,225,1 .byte 98,243,85,64,68,233,16 .byte 98,243,85,64,68,193,17 .byte 98,243,85,64,68,217,0 vpxorq %zmm30,%zmm4,%zmm4 vpternlogq $0x96,%zmm31,%zmm26,%zmm5 vpternlogq $0x96,%zmm8,%zmm24,%zmm0 vpternlogq $0x96,%zmm22,%zmm25,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm30 vpslldq $8,%zmm4,%zmm31 vpxorq %zmm30,%zmm0,%zmm0 vpxorq %zmm31,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm30 vpxorq %ymm30,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm30 vpxorq %xmm30,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm31 vpxorq %ymm31,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm31 vpxorq %xmm31,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm1 .byte 98,243,117,8,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,117,8,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,117,8,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_xGuCpnrvibyoyay: vpxorq %xmm7,%xmm14,%xmm14 .L_after_reduction_xGuCpnrvibyoyay: jmp .L_last_blocks_done_uGbbotznadbtwnB .L_last_num_blocks_is_0_uGbbotznadbtwnB: vmovdqa64 768(%rsp),%zmm13 vpxorq %zmm14,%zmm13,%zmm13 vmovdqu64 0(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 832(%rsp),%zmm13 vmovdqu64 64(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpxorq %zmm10,%zmm4,%zmm26 vpxorq %zmm6,%zmm0,%zmm24 vpxorq %zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vmovdqa64 896(%rsp),%zmm13 vmovdqu64 128(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,196,17 .byte 98,211,21,72,68,220,0 .byte 98,211,21,72,68,228,1 .byte 98,211,21,72,68,236,16 vmovdqa64 960(%rsp),%zmm13 vmovdqu64 192(%rsp,%rbx,1),%zmm12 .byte 98,211,21,72,68,244,17 .byte 98,211,21,72,68,252,0 .byte 98,83,21,72,68,212,1 .byte 98,83,21,72,68,220,16 vpternlogq $0x96,%zmm10,%zmm4,%zmm26 vpternlogq $0x96,%zmm6,%zmm0,%zmm24 vpternlogq $0x96,%zmm7,%zmm3,%zmm25 vpternlogq $0x96,%zmm11,%zmm5,%zmm26 vpsrldq $8,%zmm26,%zmm0 vpslldq $8,%zmm26,%zmm3 vpxorq %zmm0,%zmm24,%zmm24 vpxorq %zmm3,%zmm25,%zmm25 vextracti64x4 $1,%zmm24,%ymm0 vpxorq %ymm0,%ymm24,%ymm24 vextracti32x4 $1,%ymm24,%xmm0 vpxorq %xmm0,%xmm24,%xmm24 vextracti64x4 $1,%zmm25,%ymm3 vpxorq %ymm3,%ymm25,%ymm25 vextracti32x4 $1,%ymm25,%xmm3 vpxorq %xmm3,%xmm25,%xmm25 vmovdqa64 POLY2(%rip),%xmm4 .byte 98,147,93,8,68,193,1 vpslldq $8,%xmm0,%xmm0 vpxorq %xmm0,%xmm25,%xmm0 .byte 98,243,93,8,68,216,0 vpsrldq $4,%xmm3,%xmm3 .byte 98,115,93,8,68,240,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm24,%xmm3,%xmm14 .L_last_blocks_done_uGbbotznadbtwnB: vpshufb %xmm29,%xmm2,%xmm2 jmp .L_ghash_done_keEetjmxflGqBfv .L_message_below_equal_16_blocks_keEetjmxflGqBfv: movl %r8d,%r12d addl $15,%r12d shrl $4,%r12d cmpq $8,%r12 je .L_small_initial_num_blocks_is_8_vkDeiBlhaznkthD jl .L_small_initial_num_blocks_is_7_1_vkDeiBlhaznkthD cmpq $12,%r12 je .L_small_initial_num_blocks_is_12_vkDeiBlhaznkthD jl .L_small_initial_num_blocks_is_11_9_vkDeiBlhaznkthD cmpq $16,%r12 je .L_small_initial_num_blocks_is_16_vkDeiBlhaznkthD cmpq $15,%r12 je .L_small_initial_num_blocks_is_15_vkDeiBlhaznkthD cmpq $14,%r12 je 
.L_small_initial_num_blocks_is_14_vkDeiBlhaznkthD jmp .L_small_initial_num_blocks_is_13_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_11_9_vkDeiBlhaznkthD: cmpq $11,%r12 je .L_small_initial_num_blocks_is_11_vkDeiBlhaznkthD cmpq $10,%r12 je .L_small_initial_num_blocks_is_10_vkDeiBlhaznkthD jmp .L_small_initial_num_blocks_is_9_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_7_1_vkDeiBlhaznkthD: cmpq $4,%r12 je .L_small_initial_num_blocks_is_4_vkDeiBlhaznkthD jl .L_small_initial_num_blocks_is_3_1_vkDeiBlhaznkthD cmpq $7,%r12 je .L_small_initial_num_blocks_is_7_vkDeiBlhaznkthD cmpq $6,%r12 je .L_small_initial_num_blocks_is_6_vkDeiBlhaznkthD jmp .L_small_initial_num_blocks_is_5_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_3_1_vkDeiBlhaznkthD: cmpq $3,%r12 je .L_small_initial_num_blocks_is_3_vkDeiBlhaznkthD cmpq $2,%r12 je .L_small_initial_num_blocks_is_2_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_1_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%xmm29 vpaddd ONEa(%rip),%xmm2,%xmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm0,%xmm2 vpshufb %xmm29,%xmm0,%xmm0 vmovdqu8 0(%rcx,%rax,1),%xmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %xmm15,%xmm0,%xmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,8,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,8,221,199 vpxorq %xmm6,%xmm0,%xmm0 vextracti32x4 $0,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %xmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %xmm29,%xmm6,%xmm6 vextracti32x4 $0,%zmm6,%xmm13 leaq 80(%rsi),%r10 cmpq $16,%r8 jl .L_small_initial_partial_block_pelykqxdehCqvkk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pelykqxdehCqvkk .L_small_initial_partial_block_pelykqxdehCqvkk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %xmm13,%xmm14,%xmm14 jmp .L_after_reduction_pelykqxdehCqvkk .L_small_initial_compute_done_pelykqxdehCqvkk: .L_after_reduction_pelykqxdehCqvkk: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_2_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%ymm29 vshufi64x2 
$0,%ymm2,%ymm2,%ymm0 vpaddd ddq_add_1234(%rip),%ymm0,%ymm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm0,%xmm2 vpshufb %ymm29,%ymm0,%ymm0 vmovdqu8 0(%rcx,%rax,1),%ymm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %ymm15,%ymm0,%ymm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,40,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,40,221,199 vpxorq %ymm6,%ymm0,%ymm0 vextracti32x4 $1,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %ymm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %ymm29,%ymm6,%ymm6 vextracti32x4 $1,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (2 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ldyuFtpzipDvehA subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ldyuFtpzipDvehA .L_small_initial_partial_block_ldyuFtpzipDvehA: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 240(%r10),%xmm20 .byte 98,179,77,8,68,228,1 .byte 98,179,77,8,68,236,16 .byte 98,179,77,8,68,196,17 .byte 98,179,77,8,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ldyuFtpzipDvehA: orq %r8,%r8 je .L_after_reduction_ldyuFtpzipDvehA vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ldyuFtpzipDvehA: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_3_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq 
(%r10,%r15,8),%k1 vextracti32x4 $2,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $2,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $2,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (3 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_trwwageihBqcfkh subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_trwwageihBqcfkh .L_small_initial_partial_block_trwwageihBqcfkh: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 224(%r10),%ymm20 .byte 98,179,77,40,68,228,1 .byte 98,179,77,40,68,236,16 .byte 98,179,77,40,68,196,17 .byte 98,179,77,40,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_trwwageihBqcfkh: orq %r8,%r8 je .L_after_reduction_trwwageihBqcfkh vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_trwwageihBqcfkh: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_4_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm0,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vmovdqu8 
0(%rcx,%rax,1),%zmm6{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 vpxorq %zmm6,%zmm0,%zmm0 vextracti32x4 $3,%zmm0,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1){%k1} vmovdqu8 %zmm0,%zmm0{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vextracti32x4 $3,%zmm6,%xmm13 leaq 80(%rsi),%r10 subq $16 * (4 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_yotsdxeGEAxlmrj subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_yotsdxeGEAxlmrj .L_small_initial_partial_block_yotsdxeGEAxlmrj: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_yotsdxeGEAxlmrj: orq %r8,%r8 je .L_after_reduction_yotsdxeGEAxlmrj vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_yotsdxeGEAxlmrj: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_5_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %xmm29,%xmm3,%xmm3 vmovdqu8 
0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%xmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %xmm15,%xmm3,%xmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,8,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,8,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %xmm7,%xmm3,%xmm3 vextracti32x4 $0,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %xmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %xmm29,%xmm7,%xmm7 vextracti32x4 $0,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (5 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_rmwxtkgdnBhEnAk subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_rmwxtkgdnBhEnAk .L_small_initial_partial_block_rmwxtkgdnBhEnAk: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 192(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 
98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_rmwxtkgdnBhEnAk: orq %r8,%r8 je .L_after_reduction_rmwxtkgdnBhEnAk vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_rmwxtkgdnBhEnAk: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_6_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %ymm29,%ymm3,%ymm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%ymm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %ymm15,%ymm3,%ymm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,40,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,40,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %ymm7,%ymm3,%ymm3 vextracti32x4 $1,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %ymm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %ymm29,%ymm7,%ymm7 vextracti32x4 $1,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (6 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_hGvwkbDFDGzDyAp subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp 
.L_small_initial_compute_done_hGvwkbDFDGzDyAp .L_small_initial_partial_block_hGvwkbDFDGzDyAp: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 176(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 240(%r10),%xmm20 .byte 98,179,69,8,68,228,1 .byte 98,179,69,8,68,236,16 .byte 98,179,69,8,68,196,17 .byte 98,179,69,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_hGvwkbDFDGzDyAp: orq %r8,%r8 je .L_after_reduction_hGvwkbDFDGzDyAp vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_hGvwkbDFDGzDyAp: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_7_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $2,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $2,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (7 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_FchyqAlDxAtkgym subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 
144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_FchyqAlDxAtkgym .L_small_initial_partial_block_FchyqAlDxAtkgym: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 160(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 224(%r10),%ymm20 .byte 98,179,69,40,68,228,1 .byte 98,179,69,40,68,236,16 .byte 98,179,69,40,68,196,17 .byte 98,179,69,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_FchyqAlDxAtkgym: orq %r8,%r8 je .L_after_reduction_FchyqAlDxAtkgym vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_FchyqAlDxAtkgym: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_8_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $64,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm3,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 128(%rdi),%zmm15 .byte 
98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vextracti32x4 $3,%zmm3,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1){%k1} vmovdqu8 %zmm3,%zmm3{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vextracti32x4 $3,%zmm7,%xmm13 leaq 80(%rsi),%r10 subq $16 * (8 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_ChlBCihfFcxfpre subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_ChlBCihfFcxfpre .L_small_initial_partial_block_ChlBCihfFcxfpre: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 144(%r10),%zmm20 .byte 98,51,77,72,68,252,17 .byte 98,163,77,72,68,196,0 .byte 98,163,77,72,68,204,1 .byte 98,163,77,72,68,220,16 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,69,72,68,228,1 .byte 98,179,69,72,68,236,16 .byte 98,179,69,72,68,196,17 .byte 98,179,69,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_ChlBCihfFcxfpre: orq %r8,%r8 je .L_after_reduction_ChlBCihfFcxfpre vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_ChlBCihfFcxfpre: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_9_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 
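/*
 * The .L_small_initial_num_blocks_is_* paths (including the _9 case here)
 * appear to handle messages of at most 16 AES-GCM blocks: counter blocks
 * are formed from the ddq_add_1234/ddq_add_5678/ddq_add_8888 constants,
 * the ragged tail is loaded and stored under a mask taken from
 * byte64_len_to_mask_table via %k1, and the (possibly partial) final
 * block is byte-swapped with SHUF_MASK before being folded into the
 * GHASH state kept in %xmm14.
 */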
vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %xmm29,%xmm4,%xmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%xmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %xmm15,%xmm4,%xmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,8,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,8,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %xmm10,%xmm4,%xmm4 vextracti32x4 $0,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %xmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %xmm29,%xmm10,%xmm10 vextracti32x4 $0,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (9 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_trxojfuEtotExGB subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 
$1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_trxojfuEtotExGB .L_small_initial_partial_block_trxojfuEtotExGB: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 128(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 192(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_trxojfuEtotExGB: orq %r8,%r8 je .L_after_reduction_trxojfuEtotExGB vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_trxojfuEtotExGB: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_10_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %ymm29,%ymm4,%ymm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%ymm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %ymm15,%ymm4,%ymm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 
98,210,93,40,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,40,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,40,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %ymm10,%ymm4,%ymm4 vextracti32x4 $1,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %ymm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %ymm29,%ymm10,%ymm10 vextracti32x4 $1,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (10 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_zdivCCwEFvrsaiu subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_zdivCCwEFvrsaiu .L_small_initial_partial_block_zdivCCwEFvrsaiu: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 112(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 176(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,45,8,68,228,1 .byte 98,179,45,8,68,236,16 .byte 98,179,45,8,68,196,17 .byte 98,179,45,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_zdivCCwEFvrsaiu: orq 
%r8,%r8 je .L_after_reduction_zdivCCwEFvrsaiu vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_zdivCCwEFvrsaiu: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_11_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $2,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $2,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (11 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_cbByewwahwBzpzx subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq 
%zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_cbByewwahwBzpzx .L_small_initial_partial_block_cbByewwahwBzpzx: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 96(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 160(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,45,40,68,228,1 .byte 98,179,45,40,68,236,16 .byte 98,179,45,40,68,196,17 .byte 98,179,45,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_cbByewwahwBzpzx: orq %r8,%r8 je .L_after_reduction_cbByewwahwBzpzx vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_cbByewwahwBzpzx: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_12_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $128,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm4,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 
.byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vextracti32x4 $3,%zmm4,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1){%k1} vmovdqu8 %zmm4,%zmm4{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vextracti32x4 $3,%zmm10,%xmm13 leaq 80(%rsi),%r10 subq $16 * (12 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_vpiEDoFuFgdvCsg subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_vpiEDoFuFgdvCsg .L_small_initial_partial_block_vpiEDoFuFgdvCsg: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 80(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 144(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vpxorq %zmm15,%zmm0,%zmm15 vpxorq %zmm16,%zmm3,%zmm16 vpxorq %zmm17,%zmm4,%zmm17 vpxorq %zmm19,%zmm5,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,45,72,68,228,1 .byte 98,179,45,72,68,236,16 .byte 98,179,45,72,68,196,17 .byte 98,179,45,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 
vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_vpiEDoFuFgdvCsg: orq %r8,%r8 je .L_after_reduction_vpiEDoFuFgdvCsg vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_vpiEDoFuFgdvCsg: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_13_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $0,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %xmm29,%xmm5,%xmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%xmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %xmm15,%xmm5,%xmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,8,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,8,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq 
%zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %xmm11,%xmm5,%xmm5 vextracti32x4 $0,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %xmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %xmm29,%xmm11,%xmm11 vextracti32x4 $0,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (13 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_pkeazcEqwkcpavG subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_pkeazcEqwkcpavG .L_small_initial_partial_block_pkeazcEqwkcpavG: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 64(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 128(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 192(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vpxorq %zmm19,%zmm17,%zmm17 vpsrldq $8,%zmm17,%zmm4 vpslldq $8,%zmm17,%zmm5 vpxorq %zmm4,%zmm15,%zmm0 vpxorq %zmm5,%zmm16,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_pkeazcEqwkcpavG: orq %r8,%r8 je .L_after_reduction_pkeazcEqwkcpavG vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_pkeazcEqwkcpavG: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD 
.L_small_initial_num_blocks_is_14_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $1,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %ymm29,%ymm5,%ymm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%ymm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %ymm15,%ymm5,%ymm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,40,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,40,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %ymm11,%ymm5,%ymm5 vextracti32x4 $1,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %ymm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %ymm29,%ymm11,%ymm11 vextracti32x4 $1,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (14 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_GsuCukqqbwGpxDi subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 
98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_GsuCukqqbwGpxDi .L_small_initial_partial_block_GsuCukqqbwGpxDi: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 48(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 112(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 176(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 240(%r10),%xmm20 .byte 98,179,37,8,68,228,1 .byte 98,179,37,8,68,236,16 .byte 98,179,37,8,68,196,17 .byte 98,179,37,8,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_GsuCukqqbwGpxDi: orq %r8,%r8 je .L_after_reduction_GsuCukqqbwGpxDi vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_GsuCukqqbwGpxDi: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_15_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $2,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 
64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $2,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $2,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (15 - 1),%r8 cmpq $16,%r8 jl .L_small_initial_partial_block_mbxlopCmuqdpqjz subq $16,%r8 movl $0,(%rdx) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 
.byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 jmp .L_small_initial_compute_done_mbxlopCmuqdpqjz .L_small_initial_partial_block_mbxlopCmuqdpqjz: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 32(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 96(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 160(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 224(%r10),%ymm20 .byte 98,179,37,40,68,228,1 .byte 98,179,37,40,68,236,16 .byte 98,179,37,40,68,196,17 .byte 98,179,37,40,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_mbxlopCmuqdpqjz: orq %r8,%r8 je .L_after_reduction_mbxlopCmuqdpqjz vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_mbxlopCmuqdpqjz: jmp .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD .L_small_initial_num_blocks_is_16_vkDeiBlhaznkthD: vmovdqa64 SHUF_MASK(%rip),%zmm29 vshufi64x2 $0,%zmm2,%zmm2,%zmm2 vpaddd ddq_add_1234(%rip),%zmm2,%zmm0 vpaddd ddq_add_5678(%rip),%zmm2,%zmm3 vpaddd ddq_add_8888(%rip),%zmm0,%zmm4 vpaddd ddq_add_8888(%rip),%zmm3,%zmm5 leaq byte64_len_to_mask_table(%rip),%r10 movq %r8,%r15 subq $192,%r15 kmovq (%r10,%r15,8),%k1 vextracti32x4 $3,%zmm5,%xmm2 vpshufb %zmm29,%zmm0,%zmm0 vpshufb %zmm29,%zmm3,%zmm3 vpshufb %zmm29,%zmm4,%zmm4 vpshufb %zmm29,%zmm5,%zmm5 vmovdqu8 0(%rcx,%rax,1),%zmm6 vmovdqu8 64(%rcx,%rax,1),%zmm7 vmovdqu8 128(%rcx,%rax,1),%zmm10 vmovdqu8 192(%rcx,%rax,1),%zmm11{%k1}{z} vbroadcastf64x2 0(%rdi),%zmm15 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm15,%zmm3,%zmm3 vpxorq %zmm15,%zmm4,%zmm4 vpxorq %zmm15,%zmm5,%zmm5 vbroadcastf64x2 16(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 32(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 
98,210,85,72,220,239 vbroadcastf64x2 48(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 64(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 80(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 96(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 112(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 128(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 144(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 160(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 176(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 192(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 208(%rdi),%zmm15 .byte 98,210,125,72,220,199 .byte 98,210,101,72,220,223 .byte 98,210,93,72,220,231 .byte 98,210,85,72,220,239 vbroadcastf64x2 224(%rdi),%zmm15 .byte 98,210,125,72,221,199 .byte 98,210,101,72,221,223 .byte 98,210,93,72,221,231 .byte 98,210,85,72,221,239 vpxorq %zmm6,%zmm0,%zmm0 vpxorq %zmm7,%zmm3,%zmm3 vpxorq %zmm10,%zmm4,%zmm4 vpxorq %zmm11,%zmm5,%zmm5 vextracti32x4 $3,%zmm5,%xmm12 movq %r9,%r10 vmovdqu8 %zmm0,0(%r10,%rax,1) vmovdqu8 %zmm3,64(%r10,%rax,1) vmovdqu8 %zmm4,128(%r10,%rax,1) vmovdqu8 %zmm5,192(%r10,%rax,1){%k1} vmovdqu8 %zmm5,%zmm5{%k1}{z} vpshufb %zmm29,%zmm6,%zmm6 vpshufb %zmm29,%zmm7,%zmm7 vpshufb %zmm29,%zmm10,%zmm10 vpshufb %zmm29,%zmm11,%zmm11 vextracti32x4 $3,%zmm11,%xmm13 leaq 80(%rsi),%r10 subq $16 * (16 - 1),%r8 .L_small_initial_partial_block_fpGgFAenBuAyutw: movl %r8d,(%rdx) vmovdqu64 %xmm12,16(%rsi) vpxorq %zmm14,%zmm6,%zmm6 vmovdqu64 16(%r10),%zmm20 .byte 98,179,77,72,68,196,17 .byte 98,179,77,72,68,220,0 .byte 98,179,77,72,68,228,1 .byte 98,179,77,72,68,236,16 vmovdqu64 80(%r10),%zmm20 .byte 98,51,69,72,68,252,17 .byte 98,163,69,72,68,196,0 .byte 98,163,69,72,68,204,1 .byte 98,163,69,72,68,220,16 vmovdqu64 144(%r10),%zmm20 .byte 98,179,45,72,68,244,17 .byte 98,179,45,72,68,252,0 vpternlogq $0x96,%zmm0,%zmm6,%zmm15 vpternlogq $0x96,%zmm3,%zmm7,%zmm16 .byte 98,179,45,72,68,244,1 .byte 98,179,45,72,68,252,16 vpternlogq $0x96,%zmm4,%zmm6,%zmm17 vpternlogq $0x96,%zmm5,%zmm7,%zmm19 vmovdqu64 208(%r10),%ymm20 vinserti64x2 $2,240(%r10),%zmm20,%zmm20 .byte 98,179,37,72,68,228,1 .byte 98,179,37,72,68,236,16 .byte 98,179,37,72,68,196,17 .byte 98,179,37,72,68,220,0 vpxorq %zmm17,%zmm4,%zmm4 vpxorq %zmm19,%zmm5,%zmm5 vpxorq %zmm15,%zmm0,%zmm0 vpxorq %zmm16,%zmm3,%zmm3 vpxorq %zmm5,%zmm4,%zmm4 vpsrldq $8,%zmm4,%zmm17 vpslldq $8,%zmm4,%zmm19 vpxorq %zmm17,%zmm0,%zmm0 vpxorq %zmm19,%zmm3,%zmm3 vextracti64x4 $1,%zmm0,%ymm17 vpxorq %ymm17,%ymm0,%ymm0 vextracti32x4 $1,%ymm0,%xmm17 vpxorq %xmm17,%xmm0,%xmm0 vextracti64x4 $1,%zmm3,%ymm19 vpxorq %ymm19,%ymm3,%ymm3 vextracti32x4 $1,%ymm3,%xmm19 vpxorq %xmm19,%xmm3,%xmm3 vmovdqa64 
POLY2(%rip),%xmm20 .byte 98,243,93,0,68,227,1 vpslldq $8,%xmm4,%xmm4 vpxorq %xmm4,%xmm3,%xmm4 .byte 98,243,93,0,68,236,0 vpsrldq $4,%xmm5,%xmm5 .byte 98,115,93,0,68,244,16 vpslldq $4,%xmm14,%xmm14 vpternlogq $0x96,%xmm0,%xmm5,%xmm14 .L_small_initial_compute_done_fpGgFAenBuAyutw: vpxorq %xmm13,%xmm14,%xmm14 .L_after_reduction_fpGgFAenBuAyutw: .L_small_initial_blocks_encrypted_vkDeiBlhaznkthD: .L_ghash_done_keEetjmxflGqBfv: vmovdqu64 %xmm2,0(%rsi) .L_enc_dec_done_keEetjmxflGqBfv: vpshufb SHUF_MASK(%rip),%xmm14,%xmm14 vmovdqu64 %xmm14,64(%rsi) .L_enc_dec_abort_keEetjmxflGqBfv: jmp .Lexit_gcm_decrypt .Lexit_gcm_decrypt: cmpq $256,%r8 jbe .Lskip_hkeys_cleanup_byhoEGxnfawfFqd vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %zmm0,0(%rsp) vmovdqa64 %zmm0,64(%rsp) vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) vmovdqa64 %zmm0,320(%rsp) vmovdqa64 %zmm0,384(%rsp) vmovdqa64 %zmm0,448(%rsp) vmovdqa64 %zmm0,512(%rsp) vmovdqa64 %zmm0,576(%rsp) vmovdqa64 %zmm0,640(%rsp) vmovdqa64 %zmm0,704(%rsp) .Lskip_hkeys_cleanup_byhoEGxnfawfFqd: vzeroupper leaq (%rbp),%rsp .cfi_def_cfa_register %rsp popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx .byte 0xf3,0xc3 .Ldecrypt_seh_end: .cfi_endproc .size aes_gcm_decrypt_avx512, .-aes_gcm_decrypt_avx512 .section .rodata .align 16 POLY:.quad 0x0000000000000001, 0xC200000000000000 .align 64 POLY2: .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .quad 0x00000001C2000000, 0xC200000000000000 .align 16 TWOONE:.quad 0x0000000000000001, 0x0000000100000000 .align 64 SHUF_MASK: .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .quad 0x08090A0B0C0D0E0F, 0x0001020304050607 .align 16 SHIFT_MASK: .quad 0x0706050403020100, 0x0f0e0d0c0b0a0908 ALL_F: .quad 0xffffffffffffffff, 0xffffffffffffffff ZERO: .quad 0x0000000000000000, 0x0000000000000000 .align 16 ONEa: .quad 0x0000000000000001, 0x0000000000000000 .align 16 ONEf: .quad 0x0000000000000000, 0x0100000000000000 .align 64 ddq_add_1234: .quad 0x0000000000000001, 0x0000000000000000 .quad 0x0000000000000002, 0x0000000000000000 .quad 0x0000000000000003, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .align 64 ddq_add_5678: .quad 0x0000000000000005, 0x0000000000000000 .quad 0x0000000000000006, 0x0000000000000000 .quad 0x0000000000000007, 0x0000000000000000 .quad 0x0000000000000008, 0x0000000000000000 .align 64 ddq_add_4444: .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .quad 0x0000000000000004, 0x0000000000000000 .align 64 ddq_add_8888: .quad 0x0000000000000008, 0x0000000000000000 .quad 0x0000000000000008, 0x0000000000000000 .quad 0x0000000000000008, 0x0000000000000000 .quad 0x0000000000000008, 0x0000000000000000 .align 64 ddq_addbe_1234: .quad 0x0000000000000000, 0x0100000000000000 .quad 0x0000000000000000, 0x0200000000000000 .quad 0x0000000000000000, 0x0300000000000000 .quad 0x0000000000000000, 0x0400000000000000 .align 64 ddq_addbe_4444: .quad 0x0000000000000000, 0x0400000000000000 .quad 0x0000000000000000, 0x0400000000000000 .quad 0x0000000000000000, 0x0400000000000000 .quad 
0x0000000000000000, 0x0400000000000000 .align 64 byte_len_to_mask_table: .value 0x0000, 0x0001, 0x0003, 0x0007 .value 0x000f, 0x001f, 0x003f, 0x007f .value 0x00ff, 0x01ff, 0x03ff, 0x07ff .value 0x0fff, 0x1fff, 0x3fff, 0x7fff .value 0xffff .align 64 byte64_len_to_mask_table: .quad 0x0000000000000000, 0x0000000000000001 .quad 0x0000000000000003, 0x0000000000000007 .quad 0x000000000000000f, 0x000000000000001f .quad 0x000000000000003f, 0x000000000000007f .quad 0x00000000000000ff, 0x00000000000001ff .quad 0x00000000000003ff, 0x00000000000007ff .quad 0x0000000000000fff, 0x0000000000001fff .quad 0x0000000000003fff, 0x0000000000007fff .quad 0x000000000000ffff, 0x000000000001ffff .quad 0x000000000003ffff, 0x000000000007ffff .quad 0x00000000000fffff, 0x00000000001fffff .quad 0x00000000003fffff, 0x00000000007fffff .quad 0x0000000000ffffff, 0x0000000001ffffff .quad 0x0000000003ffffff, 0x0000000007ffffff .quad 0x000000000fffffff, 0x000000001fffffff .quad 0x000000003fffffff, 0x000000007fffffff .quad 0x00000000ffffffff, 0x00000001ffffffff .quad 0x00000003ffffffff, 0x00000007ffffffff .quad 0x0000000fffffffff, 0x0000001fffffffff .quad 0x0000003fffffffff, 0x0000007fffffffff .quad 0x000000ffffffffff, 0x000001ffffffffff .quad 0x000003ffffffffff, 0x000007ffffffffff .quad 0x00000fffffffffff, 0x00001fffffffffff .quad 0x00003fffffffffff, 0x00007fffffffffff .quad 0x0000ffffffffffff, 0x0001ffffffffffff .quad 0x0003ffffffffffff, 0x0007ffffffffffff .quad 0x000fffffffffffff, 0x001fffffffffffff .quad 0x003fffffffffffff, 0x007fffffffffffff .quad 0x00ffffffffffffff, 0x01ffffffffffffff .quad 0x03ffffffffffffff, 0x07ffffffffffffff .quad 0x0fffffffffffffff, 0x1fffffffffffffff .quad 0x3fffffffffffffff, 0x7fffffffffffffff .quad 0xffffffffffffffff .text #endif #endif
marvin-hansen/iggy-streaming-system
5,323
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .type gcm_gmult_ssse3, @function .globl gcm_gmult_ssse3 .hidden gcm_gmult_ssse3 .align 16 gcm_gmult_ssse3: .cfi_startproc _CET_ENDBR movdqu (%rdi),%xmm0 movdqa .Lreverse_bytes(%rip),%xmm10 movdqa .Llow4_mask(%rip),%xmm2 .byte 102,65,15,56,0,194 movdqa %xmm2,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm2,%xmm0 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax .Loop_row_1: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_1 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax .Loop_row_2: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_2 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $6,%rax .Loop_row_3: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 .byte 102,65,15,56,0,210 movdqu %xmm2,(%rdi) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .cfi_endproc .size gcm_gmult_ssse3,.-gcm_gmult_ssse3 .type gcm_ghash_ssse3, @function .globl gcm_ghash_ssse3 .hidden gcm_ghash_ssse3 .align 16 gcm_ghash_ssse3: .cfi_startproc _CET_ENDBR movdqu (%rdi),%xmm0 movdqa .Lreverse_bytes(%rip),%xmm10 movdqa .Llow4_mask(%rip),%xmm11 andq $-16,%rcx .byte 102,65,15,56,0,194 pxor %xmm3,%xmm3 .Loop_ghash: movdqu (%rdx),%xmm1 .byte 102,65,15,56,0,202 pxor %xmm1,%xmm0 movdqa %xmm11,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm11,%xmm0 pxor %xmm2,%xmm2 movq $5,%rax .Loop_row_4: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_4 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $5,%rax .Loop_row_5: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 
.byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_5 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movq $6,%rax .Loop_row_6: movdqa (%rsi),%xmm4 leaq 16(%rsi),%rsi movdqa %xmm2,%xmm6 .byte 102,15,58,15,243,1 movdqa %xmm6,%xmm3 psrldq $1,%xmm2 movdqa %xmm4,%xmm5 .byte 102,15,56,0,224 .byte 102,15,56,0,233 pxor %xmm5,%xmm2 movdqa %xmm4,%xmm5 psllq $60,%xmm5 movdqa %xmm5,%xmm6 pslldq $8,%xmm6 pxor %xmm6,%xmm3 psrldq $8,%xmm5 pxor %xmm5,%xmm2 psrlq $4,%xmm4 pxor %xmm4,%xmm2 subq $1,%rax jnz .Loop_row_6 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $1,%xmm3 pxor %xmm3,%xmm2 psrlq $5,%xmm3 pxor %xmm3,%xmm2 pxor %xmm3,%xmm3 movdqa %xmm2,%xmm0 leaq -256(%rsi),%rsi leaq 16(%rdx),%rdx subq $16,%rcx jnz .Loop_ghash .byte 102,65,15,56,0,194 movdqu %xmm0,(%rdi) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 .cfi_endproc .size gcm_ghash_ssse3,.-gcm_ghash_ssse3 .section .rodata .align 16 .Lreverse_bytes: .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 .Llow4_mask: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f .text #endif
marvin-hansen/iggy-streaming-system
12,917
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/md5-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .align 16 .globl md5_block_asm_data_order .hidden md5_block_asm_data_order .type md5_block_asm_data_order,@function md5_block_asm_data_order: .cfi_startproc _CET_ENDBR pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset r12,-32 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset r14,-40 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset r15,-48 .Lprologue: movq %rdi,%rbp shlq $6,%rdx leaq (%rsi,%rdx,1),%rdi movl 0(%rbp),%eax movl 4(%rbp),%ebx movl 8(%rbp),%ecx movl 12(%rbp),%edx cmpq %rdi,%rsi je .Lend .Lloop: movl %eax,%r8d movl %ebx,%r9d movl %ecx,%r14d movl %edx,%r15d movl 0(%rsi),%r10d movl %edx,%r11d xorl %ecx,%r11d leal -680876936(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 4(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -389564586(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 8(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal 606105819(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 12(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -1044525330(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 16(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal -176418897(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 20(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal 1200080426(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 24(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -1473231341(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 28(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -45705983(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 32(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal 1770035416(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 36(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -1958414417(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 40(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -42063(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 44(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal -1990404162(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 48(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx xorl %ecx,%r11d leal 1804603682(%rax,%r10,1),%eax andl %ebx,%r11d xorl %edx,%r11d movl 52(%rsi),%r10d addl %r11d,%eax roll $7,%eax movl %ecx,%r11d addl %ebx,%eax xorl %ebx,%r11d leal -40341101(%rdx,%r10,1),%edx andl %eax,%r11d xorl %ecx,%r11d movl 56(%rsi),%r10d addl %r11d,%edx roll $12,%edx movl %ebx,%r11d addl %eax,%edx xorl %eax,%r11d leal -1502002290(%rcx,%r10,1),%ecx andl %edx,%r11d xorl %ebx,%r11d movl 60(%rsi),%r10d addl %r11d,%ecx roll $17,%ecx movl %eax,%r11d addl %edx,%ecx xorl %edx,%r11d leal 1236535329(%rbx,%r10,1),%ebx andl %ecx,%r11d xorl %eax,%r11d movl 0(%rsi),%r10d addl %r11d,%ebx roll $22,%ebx movl %edx,%r11d addl %ecx,%ebx movl 
4(%rsi),%r10d movl %edx,%r11d movl %edx,%r12d notl %r11d leal -165796510(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 24(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal -1069501632(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 44(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal 643717713(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 0(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -373897302(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 20(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal -701558691(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 40(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal 38016083(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 60(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal -660478335(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 16(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -405537848(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 36(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal 568446438(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 56(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal -1019803690(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 12(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal -187363961(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 32(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal 1163531501(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 52(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx notl %r11d leal -1444681467(%rax,%r10,1),%eax andl %ebx,%r12d andl %ecx,%r11d movl 8(%rsi),%r10d orl %r11d,%r12d movl %ecx,%r11d addl %r12d,%eax movl %ecx,%r12d roll $5,%eax addl %ebx,%eax notl %r11d leal -51403784(%rdx,%r10,1),%edx andl %eax,%r12d andl %ebx,%r11d movl 28(%rsi),%r10d orl %r11d,%r12d movl %ebx,%r11d addl %r12d,%edx movl %ebx,%r12d roll $9,%edx addl %eax,%edx notl %r11d leal 1735328473(%rcx,%r10,1),%ecx andl %edx,%r12d andl %eax,%r11d movl 48(%rsi),%r10d orl %r11d,%r12d movl %eax,%r11d addl %r12d,%ecx movl %eax,%r12d roll $14,%ecx addl %edx,%ecx notl %r11d leal -1926607734(%rbx,%r10,1),%ebx andl %ecx,%r12d andl %edx,%r11d movl 0(%rsi),%r10d orl %r11d,%r12d movl %edx,%r11d addl %r12d,%ebx movl %edx,%r12d roll $20,%ebx addl %ecx,%ebx movl 20(%rsi),%r10d movl %ecx,%r11d leal -378558(%rax,%r10,1),%eax movl 32(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -2022574463(%rdx,%r10,1),%edx movl 44(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal 1839030562(%rcx,%r10,1),%ecx movl 56(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal 
-35309556(%rbx,%r10,1),%ebx movl 4(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal -1530992060(%rax,%r10,1),%eax movl 16(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal 1272893353(%rdx,%r10,1),%edx movl 28(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal -155497632(%rcx,%r10,1),%ecx movl 40(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal -1094730640(%rbx,%r10,1),%ebx movl 52(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal 681279174(%rax,%r10,1),%eax movl 0(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -358537222(%rdx,%r10,1),%edx movl 12(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal -722521979(%rcx,%r10,1),%ecx movl 24(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal 76029189(%rbx,%r10,1),%ebx movl 36(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx leal -640364487(%rax,%r10,1),%eax movl 48(%rsi),%r10d xorl %edx,%r11d xorl %ebx,%r11d addl %r11d,%eax roll $4,%eax movl %ebx,%r11d addl %ebx,%eax leal -421815835(%rdx,%r10,1),%edx movl 60(%rsi),%r10d xorl %ecx,%r11d xorl %eax,%r11d addl %r11d,%edx roll $11,%edx movl %eax,%r11d addl %eax,%edx leal 530742520(%rcx,%r10,1),%ecx movl 8(%rsi),%r10d xorl %ebx,%r11d xorl %edx,%r11d addl %r11d,%ecx roll $16,%ecx movl %edx,%r11d addl %edx,%ecx leal -995338651(%rbx,%r10,1),%ebx movl 0(%rsi),%r10d xorl %eax,%r11d xorl %ecx,%r11d addl %r11d,%ebx roll $23,%ebx movl %ecx,%r11d addl %ecx,%ebx movl 0(%rsi),%r10d movl $0xffffffff,%r11d xorl %edx,%r11d leal -198630844(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 28(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal 1126891415(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 56(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1416354905(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 20(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -57434055(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 48(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal 1700485571(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 12(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -1894986606(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 40(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1051523(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 4(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -2054922799(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 32(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal 1873313359(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 60(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -30611744(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx 
movl 24(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal -1560198380(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 52(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal 1309151649(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 16(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx leal -145523070(%rax,%r10,1),%eax orl %ebx,%r11d xorl %ecx,%r11d addl %r11d,%eax movl 44(%rsi),%r10d movl $0xffffffff,%r11d roll $6,%eax xorl %ecx,%r11d addl %ebx,%eax leal -1120210379(%rdx,%r10,1),%edx orl %eax,%r11d xorl %ebx,%r11d addl %r11d,%edx movl 8(%rsi),%r10d movl $0xffffffff,%r11d roll $10,%edx xorl %ebx,%r11d addl %eax,%edx leal 718787259(%rcx,%r10,1),%ecx orl %edx,%r11d xorl %eax,%r11d addl %r11d,%ecx movl 36(%rsi),%r10d movl $0xffffffff,%r11d roll $15,%ecx xorl %eax,%r11d addl %edx,%ecx leal -343485551(%rbx,%r10,1),%ebx orl %ecx,%r11d xorl %edx,%r11d addl %r11d,%ebx movl 0(%rsi),%r10d movl $0xffffffff,%r11d roll $21,%ebx xorl %edx,%r11d addl %ecx,%ebx addl %r8d,%eax addl %r9d,%ebx addl %r14d,%ecx addl %r15d,%edx addq $64,%rsi cmpq %rdi,%rsi jb .Lloop .Lend: movl %eax,0(%rbp) movl %ebx,4(%rbp) movl %ecx,8(%rbp) movl %edx,12(%rbp) movq (%rsp),%r15 .cfi_restore r15 movq 8(%rsp),%r14 .cfi_restore r14 movq 16(%rsp),%r12 .cfi_restore r12 movq 24(%rsp),%rbx .cfi_restore rbx movq 32(%rsp),%rbp .cfi_restore rbp addq $40,%rsp .cfi_adjust_cfa_offset -40 .Lepilogue: .byte 0xf3,0xc3 .cfi_endproc .size md5_block_asm_data_order,.-md5_block_asm_data_order #endif
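The md5_block_asm_data_order routine above is a fully unrolled x86-64 implementation of the MD5 compression function: the signed immediates are the RFC 1321 T[i] constants (for example -680876936 is 0xd76aa478, the first of them) and the roll counts 7/12/17/22, 5/9/14/20, 4/11/16/23 and 6/10/15/21 are the per-round rotations. As an orientation aid, the C sketch below models the same round structure; every identifier in it (md5_compress, T, S, rotl32) is invented for the sketch and is not an AWS-LC symbol.

/* Minimal C model of one MD5 block compression, matching the unrolled
 * assembly above.  Illustrative only; not AWS-LC code. */
#include <stdint.h>

/* T[i] = floor(2^32 * |sin(i + 1)|): the constants that appear as signed
 * immediates in the leal instructions above. */
static const uint32_t T[64] = {
    0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee, 0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501,
    0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be, 0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821,
    0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa, 0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8,
    0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed, 0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a,
    0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c, 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70,
    0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05, 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665,
    0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039, 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1,
    0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1, 0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391,
};

/* Per-step rotation amounts: the roll $7/$12/$17/$22 etc. above. */
static const uint32_t S[64] = {
    7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22,
    5,  9, 14, 20, 5,  9, 14, 20, 5,  9, 14, 20, 5,  9, 14, 20,
    4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,
    6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21,
};

static uint32_t rotl32(uint32_t x, uint32_t n) { return (x << n) | (x >> (32 - n)); }

/* state[4] is the A/B/C/D chaining value held at 0(%rbp)..12(%rbp) in the
 * assembly; block is one 64-byte message block read from (%rsi). */
static void md5_compress(uint32_t state[4], const uint8_t block[64]) {
    uint32_t X[16];
    for (int i = 0; i < 16; i++)  /* little-endian message words */
        X[i] = (uint32_t)block[4 * i] | ((uint32_t)block[4 * i + 1] << 8) |
               ((uint32_t)block[4 * i + 2] << 16) | ((uint32_t)block[4 * i + 3] << 24);

    uint32_t a = state[0], b = state[1], c = state[2], d = state[3];
    for (int i = 0; i < 64; i++) {
        uint32_t f, k;
        if (i < 16)      { f = (b & c) | (~b & d); k = i;                }  /* round 1: F */
        else if (i < 32) { f = (d & b) | (~d & c); k = (5 * i + 1) & 15; }  /* round 2: G */
        else if (i < 48) { f = b ^ c ^ d;          k = (3 * i + 5) & 15; }  /* round 3: H */
        else             { f = c ^ (b | ~d);       k = (7 * i) & 15;     }  /* round 4: I */
        uint32_t tmp = d;
        d = c;
        c = b;
        b = b + rotl32(a + f + X[k] + T[i], S[i]);
        a = tmp;
    }
    state[0] += a; state[1] += b; state[2] += c; state[3] += d;
}

The four words the prologue loads from 0(%rbp)..12(%rbp) into %eax/%ebx/%ecx/%edx correspond to state[0..3], initialised to the standard MD5 values 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476 before the first block, and the final addl %r8d/%r9d/%r14d/%r15d instructions are the feed-forward performed in the last line of the sketch.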
marvin-hansen/iggy-streaming-system
120,432
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/aesni-xts-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl aes_hw_xts_encrypt_avx512 .hidden aes_hw_xts_encrypt_avx512 .hidden aes_hw_xts_encrypt_avx512 .type aes_hw_xts_encrypt_avx512,@function .align 32 aes_hw_xts_encrypt_avx512: .cfi_startproc .byte 243,15,30,250 pushq %rbp movq %rsp,%rbp subq $376,%rsp andq $0xffffffffffffffc0,%rsp movq %rbx,368(%rsp) movq $0x87,%r10 vmovdqu (%r9),%xmm1 vpxor %xmm4,%xmm4,%xmm4 vmovdqu (%r8),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqu (%rcx),%xmm2 vmovdqa %xmm2,128(%rsp) vmovdqu 16(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 16(%rcx),%xmm2 vmovdqa %xmm2,144(%rsp) vmovdqu 32(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 32(%rcx),%xmm2 vmovdqa %xmm2,160(%rsp) vmovdqu 48(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 48(%rcx),%xmm2 vmovdqa %xmm2,176(%rsp) vmovdqu 64(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 64(%rcx),%xmm2 vmovdqa %xmm2,192(%rsp) vmovdqu 80(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 80(%rcx),%xmm2 vmovdqa %xmm2,208(%rsp) vmovdqu 96(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 96(%rcx),%xmm2 vmovdqa %xmm2,224(%rsp) vmovdqu 112(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 112(%rcx),%xmm2 vmovdqa %xmm2,240(%rsp) vmovdqu 128(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 128(%rcx),%xmm2 vmovdqa %xmm2,256(%rsp) vmovdqu 144(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 144(%rcx),%xmm2 vmovdqa %xmm2,272(%rsp) vmovdqu 160(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 160(%rcx),%xmm2 vmovdqa %xmm2,288(%rsp) vmovdqu 176(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 176(%rcx),%xmm2 vmovdqa %xmm2,304(%rsp) vmovdqu 192(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 192(%rcx),%xmm2 vmovdqa %xmm2,320(%rsp) vmovdqu 208(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 208(%rcx),%xmm2 vmovdqa %xmm2,336(%rsp) vmovdqu 224(%r8),%xmm0 .byte 98,242,117,8,221,200 vmovdqu 224(%rcx),%xmm2 vmovdqa %xmm2,352(%rsp) vmovdqa %xmm1,(%rsp) cmpq $0x80,%rdx jl .L_less_than_128_bytes_hEgxyDlCngwrfFe vpbroadcastq %r10,%zmm25 cmpq $0x100,%rdx jge .L_start_by16_hEgxyDlCngwrfFe cmpq $0x80,%rdx jge .L_start_by8_hEgxyDlCngwrfFe .L_do_n_blocks_hEgxyDlCngwrfFe: cmpq $0x0,%rdx je .L_ret_hEgxyDlCngwrfFe cmpq $0x70,%rdx jge .L_remaining_num_blocks_is_7_hEgxyDlCngwrfFe cmpq $0x60,%rdx jge .L_remaining_num_blocks_is_6_hEgxyDlCngwrfFe cmpq $0x50,%rdx jge .L_remaining_num_blocks_is_5_hEgxyDlCngwrfFe cmpq $0x40,%rdx jge .L_remaining_num_blocks_is_4_hEgxyDlCngwrfFe cmpq $0x30,%rdx jge .L_remaining_num_blocks_is_3_hEgxyDlCngwrfFe cmpq $0x20,%rdx jge .L_remaining_num_blocks_is_2_hEgxyDlCngwrfFe cmpq $0x10,%rdx jge .L_remaining_num_blocks_is_1_hEgxyDlCngwrfFe vmovdqa %xmm0,%xmm8 vmovdqa %xmm9,%xmm0 jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_7_hEgxyDlCngwrfFe: movq $0xffffffffffffffff,%r8 shrq $0x10,%r8 kmovq %r8,%k1 vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2{%k1} addq $0x70,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 
vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} addq $0x70,%rsi vextracti32x4 $0x2,%zmm2,%xmm8 vextracti32x4 $0x3,%zmm10,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_6_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%ymm2 addq $0x60,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) addq $0x60,%rsi vextracti32x4 $0x1,%zmm2,%xmm8 vextracti32x4 $0x2,%zmm10,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_5_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu 64(%rdi),%xmm2 addq $0x50,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 
vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu %xmm2,64(%rsi) addq $0x50,%rsi movdqa %xmm2,%xmm8 vextracti32x4 $0x1,%zmm10,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_4_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 addq $0x40,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) addq $0x40,%rsi vextracti32x4 $0x3,%zmm1,%xmm8 vextracti32x4 $0x0,%zmm10,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_3_hEgxyDlCngwrfFe: vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x2,%zmm9,%xmm11 vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 
98,242,101,8,220,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 vextracti32x4 $0x3,%zmm9,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_2_hEgxyDlCngwrfFe: vextracti32x4 $0x1,%zmm9,%xmm10 vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 vextracti32x4 $0x2,%zmm9,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_remaining_num_blocks_is_1_hEgxyDlCngwrfFe: vmovdqu (%rdi),%xmm1 addq $0x10,%rdi vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 240(%rsp),%xmm0 .byte 
98,242,117,8,220,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 vextracti32x4 $0x1,%zmm9,%xmm0 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_hEgxyDlCngwrfFe .L_start_by16_hEgxyDlCngwrfFe: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm11 vpxord %zmm14,%zmm11,%zmm11 vpsrldq $0xf,%zmm10,%zmm15 .byte 98,131,5,72,68,193,0 vpslldq $0x1,%zmm10,%zmm12 vpxord %zmm16,%zmm12,%zmm12 .L_main_loop_run_16_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 128(%rdi),%zmm3 vmovdqu8 192(%rdi),%zmm4 addq $0x100,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpxorq %zmm0,%zmm3,%zmm3 vpxorq %zmm0,%zmm4,%zmm4 vpsrldq $0xf,%zmm11,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm11,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm12,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm12,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm15,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm15,%zmm17 vpxord %zmm14,%zmm17,%zmm17 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vpsrldq $0xf,%zmm16,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm16,%zmm18 vpxord %zmm14,%zmm18,%zmm18 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 304(%rsp),%zmm0 .byte 
98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 .byte 98,242,101,72,220,216 .byte 98,242,93,72,220,224 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 .byte 98,242,101,72,221,216 .byte 98,242,93,72,221,224 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqa32 %zmm17,%zmm11 vmovdqa32 %zmm18,%zmm12 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) vmovdqu8 %zmm3,128(%rsi) vmovdqu8 %zmm4,192(%rsi) addq $0x100,%rsi subq $0x100,%rdx cmpq $0x100,%rdx jge .L_main_loop_run_16_hEgxyDlCngwrfFe cmpq $0x80,%rdx jge .L_main_loop_run_8_hEgxyDlCngwrfFe vextracti32x4 $0x3,%zmm4,%xmm0 jmp .L_do_n_blocks_hEgxyDlCngwrfFe .L_start_by8_hEgxyDlCngwrfFe: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 .L_main_loop_run_8_hEgxyDlCngwrfFe: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 addq $0x80,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vpsrldq $0xf,%zmm10,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm10,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,220,200 .byte 98,242,109,72,220,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,221,200 .byte 98,242,109,72,221,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) addq $0x80,%rsi subq $0x80,%rdx cmpq $0x80,%rdx jge .L_main_loop_run_8_hEgxyDlCngwrfFe vextracti32x4 $0x3,%zmm2,%xmm0 jmp .L_do_n_blocks_hEgxyDlCngwrfFe .L_steal_cipher_next_hEgxyDlCngwrfFe: xorq %r11,%r11 shlq 
$1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,(%rsp) movq %rbx,8(%rsp) vmovdqa (%rsp),%xmm0 .L_steal_cipher_hEgxyDlCngwrfFe: vmovdqa %xmm8,%xmm2 leaq vpshufb_shf_table(%rip),%rax vmovdqu (%rax,%rdx,1),%xmm10 vpshufb %xmm10,%xmm8,%xmm8 vmovdqu -16(%rdi,%rdx,1),%xmm3 vmovdqu %xmm8,-16(%rsi,%rdx,1) leaq vpshufb_shf_table(%rip),%rax addq $16,%rax subq %rdx,%rax vmovdqu (%rax),%xmm10 vpxor mask1(%rip),%xmm10,%xmm10 vpshufb %xmm10,%xmm3,%xmm3 vpblendvb %xmm10,%xmm2,%xmm3,%xmm3 vpxor %xmm0,%xmm3,%xmm8 vpxor 128(%rsp),%xmm8,%xmm8 .byte 98,114,61,8,220,132,36,144,0,0,0 .byte 98,114,61,8,220,132,36,160,0,0,0 .byte 98,114,61,8,220,132,36,176,0,0,0 .byte 98,114,61,8,220,132,36,192,0,0,0 .byte 98,114,61,8,220,132,36,208,0,0,0 .byte 98,114,61,8,220,132,36,224,0,0,0 .byte 98,114,61,8,220,132,36,240,0,0,0 .byte 98,114,61,8,220,132,36,0,1,0,0 .byte 98,114,61,8,220,132,36,16,1,0,0 .byte 98,114,61,8,220,132,36,32,1,0,0 .byte 98,114,61,8,220,132,36,48,1,0,0 .byte 98,114,61,8,220,132,36,64,1,0,0 .byte 98,114,61,8,220,132,36,80,1,0,0 .byte 98,114,61,8,221,132,36,96,1,0,0 vpxor %xmm0,%xmm8,%xmm8 vmovdqu %xmm8,-16(%rsi) .L_ret_hEgxyDlCngwrfFe: movq 368(%rsp),%rbx xorq %r8,%r8 movq %r8,368(%rsp) vpxorq %zmm0,%zmm0,%zmm0 vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) movq $0x3f,%r8 kmovq %r8,%k2 vmovdqa64 %zmm0,320(%rsp){%k2} movq %rbp,%rsp popq %rbp vzeroupper .byte 0xf3,0xc3 .L_less_than_128_bytes_hEgxyDlCngwrfFe: cmpq $0x10,%rdx jb .L_ret_hEgxyDlCngwrfFe movq %rdx,%r8 andq $0x70,%r8 cmpq $0x60,%r8 je .L_num_blocks_is_6_hEgxyDlCngwrfFe cmpq $0x50,%r8 je .L_num_blocks_is_5_hEgxyDlCngwrfFe cmpq $0x40,%r8 je .L_num_blocks_is_4_hEgxyDlCngwrfFe cmpq $0x30,%r8 je .L_num_blocks_is_3_hEgxyDlCngwrfFe cmpq $0x20,%r8 je .L_num_blocks_is_2_hEgxyDlCngwrfFe cmpq $0x10,%r8 je .L_num_blocks_is_1_hEgxyDlCngwrfFe .L_num_blocks_is_7_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,96(%rsp) movq %rbx,104(%rsp) vmovdqa 96(%rsp),%xmm15 vmovdqu 96(%rdi),%xmm7 addq $0x70,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 160(%rsp),%xmm0 .byte 
98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 .byte 98,242,69,8,220,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 .byte 98,242,77,8,221,240 .byte 98,242,69,8,221,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) addq $0x70,%rsi vmovdqa %xmm7,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_6_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 
vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 addq $0x60,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 .byte 98,242,77,8,220,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 .byte 98,242,77,8,221,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu 
%xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x60,%rsi vmovdqa %xmm6,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_5_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 addq $0x50,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 .byte 98,242,85,8,220,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 .byte 98,242,85,8,221,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor 
%xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x50,%rsi vmovdqa %xmm5,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_4_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 addq $0x40,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 .byte 98,242,93,8,220,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 .byte 98,242,93,8,221,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x40,%rsi vmovdqa %xmm4,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_3_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq 
%r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 .byte 98,242,101,8,220,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 98,242,109,8,221,208 .byte 98,242,101,8,221,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_2_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 .byte 98,242,109,8,220,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 .byte 
98,242,109,8,221,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .L_num_blocks_is_1_hEgxyDlCngwrfFe: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 addq $0x10,%rdi vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,220,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,221,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 andq $0xf,%rdx je .L_ret_hEgxyDlCngwrfFe jmp .L_steal_cipher_next_hEgxyDlCngwrfFe .cfi_endproc .globl aes_hw_xts_decrypt_avx512 .hidden aes_hw_xts_decrypt_avx512 .hidden aes_hw_xts_decrypt_avx512 .type aes_hw_xts_decrypt_avx512,@function .align 32 aes_hw_xts_decrypt_avx512: .cfi_startproc .byte 243,15,30,250 pushq %rbp movq %rsp,%rbp subq $376,%rsp andq $0xffffffffffffffc0,%rsp movq %rbx,368(%rsp) movq $0x87,%r10 vmovdqu (%r9),%xmm1 vpxor %xmm4,%xmm4,%xmm4 vmovdqu (%r8),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqu 224(%rcx),%xmm2 vmovdqa %xmm2,352(%rsp) vmovdqu 16(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 208(%rcx),%xmm2 vmovdqa %xmm2,336(%rsp) vmovdqu 32(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 192(%rcx),%xmm2 vmovdqa %xmm2,320(%rsp) vmovdqu 48(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 176(%rcx),%xmm2 vmovdqa %xmm2,304(%rsp) vmovdqu 64(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 160(%rcx),%xmm2 vmovdqa %xmm2,288(%rsp) vmovdqu 80(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 144(%rcx),%xmm2 vmovdqa %xmm2,272(%rsp) vmovdqu 96(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 128(%rcx),%xmm2 vmovdqa %xmm2,256(%rsp) vmovdqu 112(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 112(%rcx),%xmm2 vmovdqa %xmm2,240(%rsp) vmovdqu 128(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 96(%rcx),%xmm2 vmovdqa %xmm2,224(%rsp) vmovdqu 144(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 80(%rcx),%xmm2 vmovdqa %xmm2,208(%rsp) vmovdqu 160(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 64(%rcx),%xmm2 vmovdqa %xmm2,192(%rsp) vmovdqu 176(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 48(%rcx),%xmm2 vmovdqa %xmm2,176(%rsp) vmovdqu 192(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 32(%rcx),%xmm2 vmovdqa %xmm2,160(%rsp) vmovdqu 208(%r8),%xmm0 .byte 98,242,117,8,220,200 vmovdqu 16(%rcx),%xmm2 vmovdqa %xmm2,144(%rsp) vmovdqu 224(%r8),%xmm0 .byte 98,242,117,8,221,200 vmovdqu (%rcx),%xmm2 vmovdqa %xmm2,128(%rsp) vmovdqa %xmm1,(%rsp) cmpq $0x80,%rdx jb .L_less_than_128_bytes_amivrujEyduiFoi vpbroadcastq %r10,%zmm25 cmpq $0x100,%rdx jge .L_start_by16_amivrujEyduiFoi jmp .L_start_by8_amivrujEyduiFoi .L_do_n_blocks_amivrujEyduiFoi: cmpq $0x0,%rdx je .L_ret_amivrujEyduiFoi cmpq $0x70,%rdx jge .L_remaining_num_blocks_is_7_amivrujEyduiFoi cmpq $0x60,%rdx jge .L_remaining_num_blocks_is_6_amivrujEyduiFoi cmpq $0x50,%rdx jge 
.L_remaining_num_blocks_is_5_amivrujEyduiFoi cmpq $0x40,%rdx jge .L_remaining_num_blocks_is_4_amivrujEyduiFoi cmpq $0x30,%rdx jge .L_remaining_num_blocks_is_3_amivrujEyduiFoi cmpq $0x20,%rdx jge .L_remaining_num_blocks_is_2_amivrujEyduiFoi cmpq $0x10,%rdx jge .L_remaining_num_blocks_is_1_amivrujEyduiFoi vmovdqu %xmm5,%xmm1 vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,-16(%rsi) vmovdqa %xmm1,%xmm8 movq $0x1,%r8 kmovq %r8,%k1 vpsllq $0x3f,%xmm9,%xmm13 vpsraq $0x3f,%xmm13,%xmm14 vpandq %xmm25,%xmm14,%xmm5 vpxorq %xmm5,%xmm9,%xmm9{%k1} vpsrldq $0x8,%xmm9,%xmm10 .byte 98, 211, 181, 8, 115, 194, 1 vpslldq $0x8,%xmm13,%xmm13 vpxorq %xmm13,%xmm0,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_remaining_num_blocks_is_7_amivrujEyduiFoi: movq $0xffffffffffffffff,%r8 shrq $0x10,%r8 kmovq %r8,%k1 vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2{%k1} addq $0x70,%rdi andq $0xf,%rdx je .L_done_7_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm10,%xmm12 vextracti32x4 $0x3,%zmm10,%xmm13 vinserti32x4 $0x2,%xmm13,%zmm10,%zmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} addq $0x70,%rsi vextracti32x4 $0x2,%zmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_7_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 
vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi){%k1} jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_6_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%ymm2 addq $0x60,%rdi andq $0xf,%rdx je .L_done_6_remain_amivrujEyduiFoi vextracti32x4 $0x1,%zmm10,%xmm12 vextracti32x4 $0x2,%zmm10,%xmm13 vinserti32x4 $0x1,%xmm13,%zmm10,%zmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) addq $0x60,%rsi vextracti32x4 $0x1,%zmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_6_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 
98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %ymm2,64(%rsi) jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_5_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu 64(%rdi),%xmm2 addq $0x50,%rdi andq $0xf,%rdx je .L_done_5_remain_amivrujEyduiFoi vmovdqa %xmm10,%xmm12 vextracti32x4 $0x1,%zmm10,%xmm10 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu %xmm2,64(%rsi) addq $0x50,%rsi vmovdqa %xmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_5_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 
.byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %xmm2,64(%rsi) jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_4_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 addq $0x40,%rdi andq $0xf,%rdx je .L_done_4_remain_amivrujEyduiFoi vextracti32x4 $0x3,%zmm9,%xmm12 vinserti32x4 $0x3,%xmm10,%zmm9,%zmm9 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) addq $0x40,%rsi vextracti32x4 $0x3,%zmm1,%xmm8 vmovdqa %xmm12,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_4_remain_amivrujEyduiFoi: vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 
98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_3_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi andq $0xf,%rdx je .L_done_3_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm9,%xmm13 vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x3,%zmm9,%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 vmovdqa %xmm13,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_3_remain_amivrujEyduiFoi: vextracti32x4 $0x1,%zmm9,%xmm10 vextracti32x4 $0x2,%zmm9,%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_2_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi andq $0xf,%rdx je .L_done_2_remain_amivrujEyduiFoi vextracti32x4 $0x2,%zmm9,%xmm10 vextracti32x4 $0x1,%zmm9,%xmm12 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 vmovdqa %xmm12,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_2_remain_amivrujEyduiFoi: vextracti32x4 $0x1,%zmm9,%xmm10 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 
98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) jmp .L_ret_amivrujEyduiFoi .L_remaining_num_blocks_is_1_amivrujEyduiFoi: vmovdqu (%rdi),%xmm1 addq $0x10,%rdi andq $0xf,%rdx je .L_done_1_remain_amivrujEyduiFoi vextracti32x4 $0x1,%zmm9,%xmm11 vpxor %xmm11,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm11,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $0x10,%rsi vmovdqa %xmm1,%xmm8 vmovdqa %xmm9,%xmm0 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_1_remain_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) jmp .L_ret_amivrujEyduiFoi .L_start_by16_amivrujEyduiFoi: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm11 vpxord %zmm14,%zmm11,%zmm11 vpsrldq $0xf,%zmm10,%zmm15 .byte 
98,131,5,72,68,193,0 vpslldq $0x1,%zmm10,%zmm12 vpxord %zmm16,%zmm12,%zmm12 .L_main_loop_run_16_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 128(%rdi),%zmm3 vmovdqu8 192(%rdi),%zmm4 vmovdqu8 240(%rdi),%zmm5 addq $0x100,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpxorq %zmm0,%zmm3,%zmm3 vpxorq %zmm0,%zmm4,%zmm4 vpsrldq $0xf,%zmm11,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm11,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm12,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm12,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm15,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm15,%zmm17 vpxord %zmm14,%zmm17,%zmm17 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vpsrldq $0xf,%zmm16,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm16,%zmm18 vpxord %zmm14,%zmm18,%zmm18 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 .byte 98,242,101,72,222,216 .byte 98,242,93,72,222,224 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 .byte 98,242,101,72,223,216 .byte 98,242,93,72,223,224 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vpxorq %zmm11,%zmm3,%zmm3 vpxorq %zmm12,%zmm4,%zmm4 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqa32 %zmm17,%zmm11 vmovdqa32 %zmm18,%zmm12 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) vmovdqu8 %zmm3,128(%rsi) vmovdqu8 %zmm4,192(%rsi) addq $0x100,%rsi subq $0x100,%rdx cmpq $0x100,%rdx jge .L_main_loop_run_16_amivrujEyduiFoi cmpq $0x80,%rdx jge .L_main_loop_run_8_amivrujEyduiFoi jmp .L_do_n_blocks_amivrujEyduiFoi .L_start_by8_amivrujEyduiFoi: vbroadcasti32x4 (%rsp),%zmm0 vbroadcasti32x4 shufb_15_7(%rip),%zmm8 movq $0xaa,%r8 kmovq %r8,%k2 vpshufb %zmm8,%zmm0,%zmm1 
vpsllvq const_dq3210(%rip),%zmm0,%zmm4 vpsrlvq const_dq5678(%rip),%zmm1,%zmm2 .byte 98,147,109,72,68,217,0 vpxorq %zmm2,%zmm4,%zmm4{%k2} vpxord %zmm4,%zmm3,%zmm9 vpsllvq const_dq7654(%rip),%zmm0,%zmm5 vpsrlvq const_dq1234(%rip),%zmm1,%zmm6 .byte 98,147,77,72,68,249,0 vpxorq %zmm6,%zmm5,%zmm5{%k2} vpxord %zmm5,%zmm7,%zmm10 .L_main_loop_run_8_amivrujEyduiFoi: vmovdqu8 (%rdi),%zmm1 vmovdqu8 64(%rdi),%zmm2 vmovdqu8 112(%rdi),%xmm5 addq $0x80,%rdi vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vbroadcasti32x4 128(%rsp),%zmm0 vpxorq %zmm0,%zmm1,%zmm1 vpxorq %zmm0,%zmm2,%zmm2 vpsrldq $0xf,%zmm9,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm9,%zmm15 vpxord %zmm14,%zmm15,%zmm15 vbroadcasti32x4 144(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 160(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 176(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vpsrldq $0xf,%zmm10,%zmm13 .byte 98,19,21,72,68,241,0 vpslldq $0x1,%zmm10,%zmm16 vpxord %zmm14,%zmm16,%zmm16 vbroadcasti32x4 192(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 208(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 224(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 240(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 256(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 272(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 288(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 304(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 320(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 336(%rsp),%zmm0 .byte 98,242,117,72,222,200 .byte 98,242,109,72,222,208 vbroadcasti32x4 352(%rsp),%zmm0 .byte 98,242,117,72,223,200 .byte 98,242,109,72,223,208 vpxorq %zmm9,%zmm1,%zmm1 vpxorq %zmm10,%zmm2,%zmm2 vmovdqa32 %zmm15,%zmm9 vmovdqa32 %zmm16,%zmm10 vmovdqu8 %zmm1,(%rsi) vmovdqu8 %zmm2,64(%rsi) addq $0x80,%rsi subq $0x80,%rdx cmpq $0x80,%rdx jge .L_main_loop_run_8_amivrujEyduiFoi jmp .L_do_n_blocks_amivrujEyduiFoi .L_steal_cipher_amivrujEyduiFoi: vmovdqa %xmm8,%xmm2 leaq vpshufb_shf_table(%rip),%rax vmovdqu (%rax,%rdx,1),%xmm10 vpshufb %xmm10,%xmm8,%xmm8 vmovdqu -16(%rdi,%rdx,1),%xmm3 vmovdqu %xmm8,-16(%rsi,%rdx,1) leaq vpshufb_shf_table(%rip),%rax addq $16,%rax subq %rdx,%rax vmovdqu (%rax),%xmm10 vpxor mask1(%rip),%xmm10,%xmm10 vpshufb %xmm10,%xmm3,%xmm3 vpblendvb %xmm10,%xmm2,%xmm3,%xmm3 vpxor %xmm0,%xmm3,%xmm8 vpxor 128(%rsp),%xmm8,%xmm8 .byte 98,114,61,8,222,132,36,144,0,0,0 .byte 98,114,61,8,222,132,36,160,0,0,0 .byte 98,114,61,8,222,132,36,176,0,0,0 .byte 98,114,61,8,222,132,36,192,0,0,0 .byte 98,114,61,8,222,132,36,208,0,0,0 .byte 98,114,61,8,222,132,36,224,0,0,0 .byte 98,114,61,8,222,132,36,240,0,0,0 .byte 98,114,61,8,222,132,36,0,1,0,0 .byte 98,114,61,8,222,132,36,16,1,0,0 .byte 98,114,61,8,222,132,36,32,1,0,0 .byte 98,114,61,8,222,132,36,48,1,0,0 .byte 98,114,61,8,222,132,36,64,1,0,0 .byte 98,114,61,8,222,132,36,80,1,0,0 .byte 98,114,61,8,223,132,36,96,1,0,0 vpxor %xmm0,%xmm8,%xmm8 .L_done_amivrujEyduiFoi: vmovdqu %xmm8,-16(%rsi) .L_ret_amivrujEyduiFoi: movq 368(%rsp),%rbx xorq %r8,%r8 movq %r8,368(%rsp) vpxorq %zmm0,%zmm0,%zmm0 vmovdqa64 %zmm0,128(%rsp) vmovdqa64 %zmm0,192(%rsp) vmovdqa64 %zmm0,256(%rsp) movq $0x3f,%r8 kmovq %r8,%k2 
vmovdqa64 %zmm0,320(%rsp){%k2} movq %rbp,%rsp popq %rbp vzeroupper .byte 0xf3,0xc3 .L_less_than_128_bytes_amivrujEyduiFoi: cmpq $0x10,%rdx jb .L_ret_amivrujEyduiFoi movq %rdx,%r8 andq $0x70,%r8 cmpq $0x60,%r8 je .L_num_blocks_is_6_amivrujEyduiFoi cmpq $0x50,%r8 je .L_num_blocks_is_5_amivrujEyduiFoi cmpq $0x40,%r8 je .L_num_blocks_is_4_amivrujEyduiFoi cmpq $0x30,%r8 je .L_num_blocks_is_3_amivrujEyduiFoi cmpq $0x20,%r8 je .L_num_blocks_is_2_amivrujEyduiFoi cmpq $0x10,%r8 je .L_num_blocks_is_1_amivrujEyduiFoi .L_num_blocks_is_7_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,96(%rsp) movq %rbx,104(%rsp) vmovdqa 96(%rsp),%xmm15 vmovdqu 96(%rdi),%xmm7 addq $0x70,%rdi andq $0xf,%rdx je .L_done_7_amivrujEyduiFoi .L_steal_cipher_7_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm15,%xmm16 vmovdqa 16(%rsp),%xmm15 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 
98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 .byte 98,242,69,8,223,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x70,%rsi vmovdqa64 %xmm16,%xmm0 vmovdqa %xmm7,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_7_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vpxor %xmm0,%xmm7,%xmm7 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 
98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 .byte 98,242,69,8,222,248 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 .byte 98,242,69,8,223,248 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vpxor %xmm15,%xmm7,%xmm7 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) addq $0x70,%rsi vmovdqa %xmm7,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_6_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,80(%rsp) movq %rbx,88(%rsp) vmovdqa 80(%rsp),%xmm14 vmovdqu 80(%rdi),%xmm6 addq $0x60,%rdi andq $0xf,%rdx je .L_done_6_amivrujEyduiFoi .L_steal_cipher_6_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm14,%xmm15 vmovdqa 16(%rsp),%xmm14 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor 
%xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x60,%rsi vmovdqa %xmm15,%xmm0 vmovdqa %xmm6,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_6_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vpxor %xmm0,%xmm6,%xmm6 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 
160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 .byte 98,242,77,8,222,240 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 .byte 98,242,77,8,223,240 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vpxor %xmm14,%xmm6,%xmm6 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) addq $0x60,%rsi vmovdqa %xmm6,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_5_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,64(%rsp) movq %rbx,72(%rsp) vmovdqa 64(%rsp),%xmm13 vmovdqu 64(%rdi),%xmm5 addq $0x50,%rdi andq $0xf,%rdx je .L_done_5_amivrujEyduiFoi .L_steal_cipher_5_amivrujEyduiFoi: xorq 
%r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm13,%xmm14 vmovdqa 16(%rsp),%xmm13 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x50,%rsi vmovdqa %xmm14,%xmm0 vmovdqa %xmm5,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_5_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vpxor %xmm0,%xmm5,%xmm5 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 
176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 .byte 98,242,85,8,222,232 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 .byte 98,242,85,8,223,232 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vpxor %xmm13,%xmm5,%xmm5 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) addq $0x50,%rsi vmovdqa %xmm5,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_4_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,48(%rsp) movq %rbx,56(%rsp) vmovdqa 48(%rsp),%xmm12 vmovdqu 48(%rdi),%xmm4 addq $0x40,%rdi andq $0xf,%rdx je .L_done_4_amivrujEyduiFoi .L_steal_cipher_4_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm12,%xmm13 vmovdqa 16(%rsp),%xmm12 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x40,%rsi vmovdqa %xmm13,%xmm0 vmovdqa %xmm4,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_4_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vpxor %xmm0,%xmm4,%xmm4 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 
98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 .byte 98,242,93,8,222,224 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 .byte 98,242,93,8,223,224 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vpxor %xmm12,%xmm4,%xmm4 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) addq $0x40,%rsi vmovdqa %xmm4,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_3_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,32(%rsp) movq %rbx,40(%rsp) vmovdqa 32(%rsp),%xmm11 vmovdqu 32(%rdi),%xmm3 addq $0x30,%rdi andq $0xf,%rdx je .L_done_3_amivrujEyduiFoi .L_steal_cipher_3_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm11,%xmm12 vmovdqa 16(%rsp),%xmm11 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x30,%rsi vmovdqa %xmm12,%xmm0 vmovdqa %xmm3,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_3_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vpxor %xmm0,%xmm3,%xmm3 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 
98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 .byte 98,242,101,8,222,216 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 .byte 98,242,101,8,223,216 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vpxor %xmm11,%xmm3,%xmm3 vmovdqu %xmm1,(%rsi) vmovdqu %xmm2,16(%rsi) addq $0x30,%rsi vmovdqa %xmm3,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_2_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa 16(%rsp),%xmm10 vmovdqu 16(%rdi),%xmm2 addq $0x20,%rdi andq $0xf,%rdx je .L_done_2_amivrujEyduiFoi .L_steal_cipher_2_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm10,%xmm11 vmovdqa 16(%rsp),%xmm10 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) addq $0x20,%rsi vmovdqa %xmm11,%xmm0 vmovdqa %xmm2,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_2_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vpxor %xmm0,%xmm2,%xmm2 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 160(%rsp),%xmm0 .byte 
98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 .byte 98,242,109,8,222,208 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 .byte 98,242,109,8,223,208 vpxor %xmm9,%xmm1,%xmm1 vpxor %xmm10,%xmm2,%xmm2 vmovdqu %xmm1,(%rsi) addq $0x20,%rsi vmovdqa %xmm2,%xmm8 jmp .L_done_amivrujEyduiFoi .L_num_blocks_is_1_amivrujEyduiFoi: vmovdqa 0(%rsp),%xmm9 movq 0(%rsp),%rax movq 8(%rsp),%rbx vmovdqu 0(%rdi),%xmm1 addq $0x10,%rdi andq $0xf,%rdx je .L_done_1_amivrujEyduiFoi .L_steal_cipher_1_amivrujEyduiFoi: xorq %r11,%r11 shlq $1,%rax adcq %rbx,%rbx cmovcq %r10,%r11 xorq %r11,%rax movq %rax,16(%rsp) movq %rbx,24(%rsp) vmovdqa64 %xmm9,%xmm10 vmovdqa 16(%rsp),%xmm9 vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 addq $0x10,%rsi vmovdqa %xmm10,%xmm0 vmovdqa %xmm1,%xmm8 jmp .L_steal_cipher_amivrujEyduiFoi .L_done_1_amivrujEyduiFoi: vpxor %xmm9,%xmm1,%xmm1 vmovdqa 128(%rsp),%xmm0 vpxor %xmm0,%xmm1,%xmm1 vmovdqa 144(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 160(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 176(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 192(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 208(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 224(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 240(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 256(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 272(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 288(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 304(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 320(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 336(%rsp),%xmm0 .byte 98,242,117,8,222,200 vmovdqa 352(%rsp),%xmm0 .byte 98,242,117,8,223,200 vpxor %xmm9,%xmm1,%xmm1 addq $0x10,%rsi vmovdqa %xmm1,%xmm8 jmp .L_done_amivrujEyduiFoi .cfi_endproc .section .rodata .align 16 vpshufb_shf_table: .quad 0x8786858483828100, 0x8f8e8d8c8b8a8988 .quad 0x0706050403020100, 0x000e0d0c0b0a0908 mask1: .quad 0x8080808080808080, 0x8080808080808080 const_dq3210: .quad 0, 0, 1, 1, 2, 2, 3, 3 const_dq5678: 
.quad 8, 8, 7, 7, 6, 6, 5, 5 const_dq7654: .quad 4, 4, 5, 5, 6, 6, 7, 7 const_dq1234: .quad 4, 4, 3, 3, 2, 2, 1, 1 shufb_15_7: .byte 15, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 7, 0xff, 0xff .byte 0xff, 0xff, 0xff, 0xff, 0xff .text #endif #endif
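// Note: the ".byte 98,242,..." runs above appear to be manually-encoded EVEX
// vaesdec (opcode 0xde) / vaesdeclast (0xdf) instructions operating on
// %xmm1..%xmm4, with each round key reloaded into %xmm0 from
// 128(%rsp)..352(%rsp) before the round. The shlq/adcq/cmovcq/xorq sequences
// double the 128-bit tweak in GF(2^128), folding any carry back in via the
// reduction constant assumed to be held in %r10, and the .L_steal_cipher_*
// paths handle the XTS ciphertext-stealing case selected by the
// "andq $0xf,%rdx" length check.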
marvin-hansen/iggy-streaming-system
58,872
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/x86_64-mont5.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P .globl bn_mul_mont_gather5 .hidden bn_mul_mont_gather5 .type bn_mul_mont_gather5,@function .align 64 bn_mul_mont_gather5: .cfi_startproc _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax .cfi_def_cfa_register %rax testl $7,%r9d jnz .Lmul_enter #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%r11 movl 8(%r11),%r11d #endif jmp .Lmul4x_enter .align 16 .Lmul_enter: movd 8(%rsp),%xmm5 pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 negq %r9 movq %rsp,%r11 leaq -280(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul_page_walk jmp .Lmul_page_walk_done .Lmul_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul_page_walk .Lmul_page_walk_done: leaq .Linc(%rip),%r10 movq %rax,8(%rsp,%r9,8) .cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08 .Lmul_body: leaq 128(%rdx),%r12 movdqa 0(%r10),%xmm0 movdqa 16(%r10),%xmm1 leaq 24-112(%rsp,%r9,8),%r10 andq $-16,%r10 pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 .byte 0x67 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 .byte 0x67 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%r12),%xmm0 pand 80(%r12),%xmm1 pand 96(%r12),%xmm2 movdqa %xmm3,352(%r10) pand 112(%r12),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%r12),%xmm4 movdqa -112(%r12),%xmm5 movdqa -96(%r12),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%r12),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%r12),%xmm4 movdqa -48(%r12),%xmm5 movdqa -32(%r12),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%r12),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%r12),%xmm4 movdqa 16(%r12),%xmm5 movdqa 32(%r12),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%r12),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 por 
%xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq (%r8),%r8 movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%r13 leaq 1(%r15),%r15 jmp .L1st_enter .align 16 .L1st: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%r13 movq %r10,%r11 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 .L1st_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx leaq 1(%r15),%r15 movq %rdx,%r10 mulq %rbp cmpq %r9,%r15 jne .L1st addq %rax,%r13 adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-16(%rsp,%r9,8) movq %rdx,%r13 movq %r10,%r11 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 jmp .Louter .align 16 .Louter: leaq 24+128(%rsp,%r9,8),%rdx andq $-16,%rdx pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r12),%xmm0 movdqa -112(%r12),%xmm1 movdqa -96(%r12),%xmm2 movdqa -80(%r12),%xmm3 pand -128(%rdx),%xmm0 pand -112(%rdx),%xmm1 por %xmm0,%xmm4 pand -96(%rdx),%xmm2 por %xmm1,%xmm5 pand -80(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r12),%xmm0 movdqa -48(%r12),%xmm1 movdqa -32(%r12),%xmm2 movdqa -16(%r12),%xmm3 pand -64(%rdx),%xmm0 pand -48(%rdx),%xmm1 por %xmm0,%xmm4 pand -32(%rdx),%xmm2 por %xmm1,%xmm5 pand -16(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r12),%xmm0 movdqa 16(%r12),%xmm1 movdqa 32(%r12),%xmm2 movdqa 48(%r12),%xmm3 pand 0(%rdx),%xmm0 pand 16(%rdx),%xmm1 por %xmm0,%xmm4 pand 32(%rdx),%xmm2 por %xmm1,%xmm5 pand 48(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%r12),%xmm0 movdqa 80(%r12),%xmm1 movdqa 96(%r12),%xmm2 movdqa 112(%r12),%xmm3 pand 64(%rdx),%xmm0 pand 80(%rdx),%xmm1 por %xmm0,%xmm4 pand 96(%rdx),%xmm2 por %xmm1,%xmm5 pand 112(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%r12),%r12 movq (%rsi),%rax .byte 102,72,15,126,195 xorq %r15,%r15 movq %r8,%rbp movq (%rsp),%r10 mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq 8(%rsp),%r10 movq %rdx,%r13 leaq 1(%r15),%r15 jmp .Linner_enter .align 16 .Linner: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 .Linner_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 leaq 1(%r15),%r15 mulq %rbp cmpq %r9,%r15 jne .Linner addq %rax,%r13 adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r9,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r9,8) movq %rdx,%r13 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 cmpq %r9,%r14 jb .Louter xorq %r14,%r14 movq (%rsp),%rax leaq (%rsp),%rsi movq %r9,%r15 jmp .Lsub .align 16 .Lsub: sbbq (%rcx,%r14,8),%rax movq %rax,(%rdi,%r14,8) movq 8(%rsi,%r14,8),%rax leaq 1(%r14),%r14 decq %r15 jnz .Lsub sbbq $0,%rax movq $-1,%rbx xorq %rax,%rbx xorq %r14,%r14 movq %r9,%r15 .Lcopy: movq (%rdi,%r14,8),%rcx movq (%rsp,%r14,8),%rdx andq %rbx,%rcx andq %rax,%rdx movq %r14,(%rsp,%r14,8) orq %rcx,%rdx movq %rdx,(%rdi,%r14,8) leaq 1(%r14),%r14 subq $1,%r15 jnz .Lcopy movq 8(%rsp,%r9,8),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 
movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lmul_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mul_mont_gather5,.-bn_mul_mont_gather5 .type bn_mul4x_mont_gather5,@function .align 32 bn_mul4x_mont_gather5: .cfi_startproc .byte 0x67 movq %rsp,%rax .cfi_def_cfa_register %rax .Lmul4x_enter: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX andl $0x80108,%r11d cmpl $0x80108,%r11d je .Lmulx4x_enter #endif pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lmul4x_prologue: .byte 0x67 shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb .Lmul4xsp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp .Lmul4xsp_done .align 32 .Lmul4xsp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp .Lmul4xsp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmul4x_page_walk jmp .Lmul4x_page_walk_done .Lmul4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmul4x_page_walk .Lmul4x_page_walk_done: negq %r9 movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 .Lmul4x_body: call mul4x_internal movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lmul4x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mul4x_mont_gather5,.-bn_mul4x_mont_gather5 .type mul4x_internal,@function .align 32 mul4x_internal: .cfi_startproc shlq $5,%r9 movd 8(%rax),%xmm5 leaq .Linc(%rip),%rax leaq 128(%rdx,%r9,1),%r13 shrq $5,%r9 movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 88-112(%rsp,%r9,1),%r10 leaq 128(%rdx),%r12 pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 .byte 0x67,0x67 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 .byte 0x67 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 .byte 0x67 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%r12),%xmm0 pand 80(%r12),%xmm1 pand 96(%r12),%xmm2 movdqa %xmm3,352(%r10) pand 112(%r12),%xmm3 
por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%r12),%xmm4 movdqa -112(%r12),%xmm5 movdqa -96(%r12),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%r12),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%r12),%xmm4 movdqa -48(%r12),%xmm5 movdqa -32(%r12),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%r12),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%r12),%xmm4 movdqa 16(%r12),%xmm5 movdqa 32(%r12),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%r12),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 por %xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq %r13,16+8(%rsp) movq %rdi,56+8(%rsp) movq (%r8),%r8 movq (%rsi),%rax leaq (%rsi,%r9,1),%rsi negq %r9 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp leaq 64+8(%rsp),%r14 movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi,%r9,1),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%r9),%r15 leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdi,(%r14) movq %rdx,%r13 jmp .L1st4x .align 32 .L1st4x: mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq 0(%rcx),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdi,(%r14) movq %rdx,%r13 addq $32,%r15 jnz .L1st4x mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%r13 leaq (%rcx,%r9,1),%rcx xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi movq %r13,-8(%r14) jmp .Louter4x .align 32 .Louter4x: leaq 16+128(%r14),%rdx pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r12),%xmm0 movdqa -112(%r12),%xmm1 movdqa -96(%r12),%xmm2 movdqa -80(%r12),%xmm3 pand -128(%rdx),%xmm0 pand -112(%rdx),%xmm1 por %xmm0,%xmm4 pand -96(%rdx),%xmm2 por %xmm1,%xmm5 pand -80(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r12),%xmm0 movdqa -48(%r12),%xmm1 movdqa -32(%r12),%xmm2 movdqa -16(%r12),%xmm3 pand -64(%rdx),%xmm0 pand -48(%rdx),%xmm1 por %xmm0,%xmm4 pand -32(%rdx),%xmm2 por %xmm1,%xmm5 pand -16(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r12),%xmm0 movdqa 16(%r12),%xmm1 movdqa 32(%r12),%xmm2 movdqa 48(%r12),%xmm3 pand 0(%rdx),%xmm0 pand 16(%rdx),%xmm1 por %xmm0,%xmm4 pand 32(%rdx),%xmm2 por %xmm1,%xmm5 pand 48(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 
movdqa 64(%r12),%xmm0 movdqa 80(%r12),%xmm1 movdqa 96(%r12),%xmm2 movdqa 112(%r12),%xmm3 pand 64(%rdx),%xmm0 pand 80(%rdx),%xmm1 por %xmm0,%xmm4 pand 96(%rdx),%xmm2 por %xmm1,%xmm5 pand 112(%rdx),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%r12),%r12 .byte 102,72,15,126,195 movq (%r14,%r9,1),%r10 movq %r8,%rbp mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 movq %rdi,(%r14) leaq (%r14,%r9,1),%r14 mulq %rbp addq %rax,%r10 movq 8(%rsi,%r9,1),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%r9),%r15 leaq 32(%rcx),%rcx adcq $0,%rdx movq %rdx,%r13 jmp .Linner4x .align 32 .Linner4x: mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax adcq $0,%rdx addq 16(%r14),%r10 leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-32(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx),%rax adcq $0,%rdx addq -8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq 0(%rcx),%rax adcq $0,%rdx addq (%r14),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,1),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-16(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi,%r15,1),%rax adcq $0,%rdx addq %r11,%rdi leaq 32(%rcx),%rcx adcq $0,%rdx movq %r13,-8(%r14) movq %rdx,%r13 addq $32,%r15 jnz .Linner4x mulq %rbx addq %rax,%r10 movq -16(%rcx),%rax adcq $0,%rdx addq 16(%r14),%r10 leaq 32(%r14),%r14 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %rdi,-32(%r14) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq %rbp,%rax movq -8(%rcx),%rbp adcq $0,%rdx addq -8(%r14),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r9,1),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %r13,-24(%r14) movq %rdx,%r13 movq %rdi,-16(%r14) leaq (%rcx,%r9,1),%rcx xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi addq (%r14),%r13 adcq $0,%rdi movq %r13,-8(%r14) cmpq 16+8(%rsp),%r12 jb .Louter4x xorq %rax,%rax subq %r13,%rbp adcq %r15,%r15 orq %r15,%rdi subq %rdi,%rax leaq (%r14,%r9,1),%rbx movq (%rcx),%r12 leaq (%rcx),%rbp movq %r9,%rcx sarq $3+2,%rcx movq 56+8(%rsp),%rdi decq %r12 xorq %r10,%r10 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqr4x_sub_entry .cfi_endproc .size mul4x_internal,.-mul4x_internal .globl bn_power5 .hidden bn_power5 .type bn_power5,@function .align 32 bn_power5: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%r11 movl 8(%r11),%r11d andl $0x80108,%r11d cmpl $0x80108,%r11d je .Lpowerx5_enter #endif pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lpower5_prologue: shll $3,%r9d leal (%r9,%r9,2),%r10d negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb .Lpwr_sp_alt subq %r11,%rbp leaq 
-320(%rbp,%r9,2),%rbp jmp .Lpwr_sp_done .align 32 .Lpwr_sp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp .Lpwr_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lpwr_page_walk jmp .Lpwr_page_walk_done .Lpwr_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lpwr_page_walk .Lpwr_page_walk_done: movq %r9,%r10 negq %r9 movq %r8,32(%rsp) movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 .Lpower5_body: .byte 102,72,15,110,207 .byte 102,72,15,110,209 .byte 102,73,15,110,218 .byte 102,72,15,110,226 call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal call __bn_sqr8x_internal call __bn_post4x_internal .byte 102,72,15,126,209 .byte 102,72,15,126,226 movq %rsi,%rdi movq 40(%rsp),%rax leaq 32(%rsp),%r8 call mul4x_internal movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpower5_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_power5,.-bn_power5 .globl bn_sqr8x_internal .hidden bn_sqr8x_internal .hidden bn_sqr8x_internal .type bn_sqr8x_internal,@function .align 32 bn_sqr8x_internal: __bn_sqr8x_internal: .cfi_startproc _CET_ENDBR leaq 32(%r10),%rbp leaq (%rsi,%r9,1),%rsi movq %r9,%rcx movq -32(%rsi,%rbp,1),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi,%rbp,1),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi,%rbp,1),%rbx movq %rax,%r15 mulq %r14 movq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 movq %r10,-24(%rdi,%rbp,1) mulq %r14 addq %rax,%r11 movq %rbx,%rax adcq $0,%rdx movq %r11,-16(%rdi,%rbp,1) movq %rdx,%r10 movq -8(%rsi,%rbp,1),%rbx mulq %r15 movq %rax,%r12 movq %rbx,%rax movq %rdx,%r13 leaq (%rbp),%rcx mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) jmp .Lsqr4x_1st .align 32 .Lsqr4x_1st: movq (%rsi,%rcx,1),%rbx mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %rdx,%r12 adcq $0,%r12 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 8(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,(%rdi,%rcx,1) movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq 16(%rsi,%rcx,1),%rbx movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %r10,8(%rdi,%rcx,1) movq %rdx,%r12 adcq $0,%r12 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 24(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,16(%rdi,%rcx,1) movq %rdx,%r13 adcq $0,%r13 leaq 32(%rcx),%rcx mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) cmpq $0,%rcx jne .Lsqr4x_1st mulq %r15 addq %rax,%r13 leaq 16(%rbp),%rbp adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) jmp .Lsqr4x_outer .align 32 .Lsqr4x_outer: movq -32(%rsi,%rbp,1),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi,%rbp,1),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi,%rbp,1),%rbx movq %rax,%r15 mulq %r14 movq 
-24(%rdi,%rbp,1),%r10 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx movq %r10,-24(%rdi,%rbp,1) movq %rdx,%r11 mulq %r14 addq %rax,%r11 movq %rbx,%rax adcq $0,%rdx addq -16(%rdi,%rbp,1),%r11 movq %rdx,%r10 adcq $0,%r10 movq %r11,-16(%rdi,%rbp,1) xorq %r12,%r12 movq -8(%rsi,%rbp,1),%rbx mulq %r15 addq %rax,%r12 movq %rbx,%rax adcq $0,%rdx addq -8(%rdi,%rbp,1),%r12 movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx addq %r12,%r10 movq %rdx,%r11 adcq $0,%r11 movq %r10,-8(%rdi,%rbp,1) leaq (%rbp),%rcx jmp .Lsqr4x_inner .align 32 .Lsqr4x_inner: movq (%rsi,%rcx,1),%rbx mulq %r15 addq %rax,%r13 movq %rbx,%rax movq %rdx,%r12 adcq $0,%r12 addq (%rdi,%rcx,1),%r13 adcq $0,%r12 .byte 0x67 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq 8(%rsi,%rcx,1),%rbx movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %r11,(%rdi,%rcx,1) movq %rbx,%rax movq %rdx,%r13 adcq $0,%r13 addq 8(%rdi,%rcx,1),%r12 leaq 16(%rcx),%rcx adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax adcq $0,%rdx addq %r12,%r10 movq %rdx,%r11 adcq $0,%r11 movq %r10,-8(%rdi,%rcx,1) cmpq $0,%rcx jne .Lsqr4x_inner .byte 0x67 mulq %r15 addq %rax,%r13 adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) addq $16,%rbp jnz .Lsqr4x_outer movq -32(%rsi),%r14 leaq 48+8(%rsp,%r9,2),%rdi movq -24(%rsi),%rax leaq -32(%rdi,%rbp,1),%rdi movq -16(%rsi),%rbx movq %rax,%r15 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 mulq %r14 addq %rax,%r11 movq %rbx,%rax movq %r10,-24(%rdi) movq %rdx,%r10 adcq $0,%r10 addq %r13,%r11 movq -8(%rsi),%rbx adcq $0,%r10 mulq %r15 addq %rax,%r12 movq %rbx,%rax movq %r11,-16(%rdi) movq %rdx,%r13 adcq $0,%r13 mulq %r14 addq %rax,%r10 movq %rbx,%rax movq %rdx,%r11 adcq $0,%r11 addq %r12,%r10 adcq $0,%r11 movq %r10,-8(%rdi) mulq %r15 addq %rax,%r13 movq -16(%rsi),%rax adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,(%rdi) movq %rdx,%r12 movq %rdx,8(%rdi) mulq %rbx addq $16,%rbp xorq %r14,%r14 subq %r9,%rbp xorq %r15,%r15 addq %r12,%rax adcq $0,%rdx movq %rax,8(%rdi) movq %rdx,16(%rdi) movq %r15,24(%rdi) movq -16(%rsi,%rbp,1),%rax leaq 48+8(%rsp),%rdi xorq %r10,%r10 movq 8(%rdi),%r11 leaq (%r14,%r10,2),%r12 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq 16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 24(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi,%rbp,1),%rax movq %r12,(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 32(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 40(%rdi),%r11 adcq %rax,%rbx movq 0(%rsi,%rbp,1),%rax movq %rbx,16(%rdi) adcq %rdx,%r8 leaq 16(%rbp),%rbp movq %r8,24(%rdi) sbbq %r15,%r15 leaq 64(%rdi),%rdi jmp .Lsqr4x_shift_n_add .align 32 .Lsqr4x_shift_n_add: leaq (%r14,%r10,2),%r12 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq -16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq -8(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi,%rbp,1),%rax movq %r12,-32(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,-24(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 0(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 8(%rdi),%r11 adcq %rax,%rbx movq 0(%rsi,%rbp,1),%rax movq %rbx,-16(%rdi) adcq %rdx,%r8 leaq (%r14,%r10,2),%r12 movq %r8,-8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq 16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 24(%rdi),%r11 adcq %rax,%r12 movq 
8(%rsi,%rbp,1),%rax movq %r12,0(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,8(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 movq 32(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq 40(%rdi),%r11 adcq %rax,%rbx movq 16(%rsi,%rbp,1),%rax movq %rbx,16(%rdi) adcq %rdx,%r8 movq %r8,24(%rdi) sbbq %r15,%r15 leaq 64(%rdi),%rdi addq $32,%rbp jnz .Lsqr4x_shift_n_add leaq (%r14,%r10,2),%r12 .byte 0x67 shrq $63,%r10 leaq (%rcx,%r11,2),%r13 shrq $63,%r11 orq %r10,%r13 movq -16(%rdi),%r10 movq %r11,%r14 mulq %rax negq %r15 movq -8(%rdi),%r11 adcq %rax,%r12 movq -8(%rsi),%rax movq %r12,-32(%rdi) adcq %rdx,%r13 leaq (%r14,%r10,2),%rbx movq %r13,-24(%rdi) sbbq %r15,%r15 shrq $63,%r10 leaq (%rcx,%r11,2),%r8 shrq $63,%r11 orq %r10,%r8 mulq %rax negq %r15 adcq %rax,%rbx adcq %rdx,%r8 movq %rbx,-16(%rdi) movq %r8,-8(%rdi) .byte 102,72,15,126,213 __bn_sqr8x_reduction: xorq %rax,%rax leaq (%r9,%rbp,1),%rcx leaq 48+8(%rsp,%r9,2),%rdx movq %rcx,0+8(%rsp) leaq 48+8(%rsp,%r9,1),%rdi movq %rdx,8+8(%rsp) negq %r9 jmp .L8x_reduction_loop .align 32 .L8x_reduction_loop: leaq (%rdi,%r9,1),%rdi .byte 0x66 movq 0(%rdi),%rbx movq 8(%rdi),%r9 movq 16(%rdi),%r10 movq 24(%rdi),%r11 movq 32(%rdi),%r12 movq 40(%rdi),%r13 movq 48(%rdi),%r14 movq 56(%rdi),%r15 movq %rax,(%rdx) leaq 64(%rdi),%rdi .byte 0x67 movq %rbx,%r8 imulq 32+8(%rsp),%rbx movq 0(%rbp),%rax movl $8,%ecx jmp .L8x_reduce .align 32 .L8x_reduce: mulq %rbx movq 8(%rbp),%rax negq %r8 movq %rdx,%r8 adcq $0,%r8 mulq %rbx addq %rax,%r9 movq 16(%rbp),%rax adcq $0,%rdx addq %r9,%r8 movq %rbx,48-8+8(%rsp,%rcx,8) movq %rdx,%r9 adcq $0,%r9 mulq %rbx addq %rax,%r10 movq 24(%rbp),%rax adcq $0,%rdx addq %r10,%r9 movq 32+8(%rsp),%rsi movq %rdx,%r10 adcq $0,%r10 mulq %rbx addq %rax,%r11 movq 32(%rbp),%rax adcq $0,%rdx imulq %r8,%rsi addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 mulq %rbx addq %rax,%r12 movq 40(%rbp),%rax adcq $0,%rdx addq %r12,%r11 movq %rdx,%r12 adcq $0,%r12 mulq %rbx addq %rax,%r13 movq 48(%rbp),%rax adcq $0,%rdx addq %r13,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %rbx addq %rax,%r14 movq 56(%rbp),%rax adcq $0,%rdx addq %r14,%r13 movq %rdx,%r14 adcq $0,%r14 mulq %rbx movq %rsi,%rbx addq %rax,%r15 movq 0(%rbp),%rax adcq $0,%rdx addq %r15,%r14 movq %rdx,%r15 adcq $0,%r15 decl %ecx jnz .L8x_reduce leaq 64(%rbp),%rbp xorq %rax,%rax movq 8+8(%rsp),%rdx cmpq 0+8(%rsp),%rbp jae .L8x_no_tail .byte 0x66 addq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 sbbq %rsi,%rsi movq 48+56+8(%rsp),%rbx movl $8,%ecx movq 0(%rbp),%rax jmp .L8x_tail .align 32 .L8x_tail: mulq %rbx addq %rax,%r8 movq 8(%rbp),%rax movq %r8,(%rdi) movq %rdx,%r8 adcq $0,%r8 mulq %rbx addq %rax,%r9 movq 16(%rbp),%rax adcq $0,%rdx addq %r9,%r8 leaq 8(%rdi),%rdi movq %rdx,%r9 adcq $0,%r9 mulq %rbx addq %rax,%r10 movq 24(%rbp),%rax adcq $0,%rdx addq %r10,%r9 movq %rdx,%r10 adcq $0,%r10 mulq %rbx addq %rax,%r11 movq 32(%rbp),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 mulq %rbx addq %rax,%r12 movq 40(%rbp),%rax adcq $0,%rdx addq %r12,%r11 movq %rdx,%r12 adcq $0,%r12 mulq %rbx addq %rax,%r13 movq 48(%rbp),%rax adcq $0,%rdx addq %r13,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %rbx addq %rax,%r14 movq 56(%rbp),%rax adcq $0,%rdx addq %r14,%r13 movq %rdx,%r14 adcq $0,%r14 mulq %rbx movq 48-16+8(%rsp,%rcx,8),%rbx addq %rax,%r15 adcq $0,%rdx addq %r15,%r14 movq 0(%rbp),%rax movq %rdx,%r15 adcq $0,%r15 decl %ecx jnz .L8x_tail leaq 64(%rbp),%rbp movq 8+8(%rsp),%rdx cmpq 
0+8(%rsp),%rbp jae .L8x_tail_done movq 48+56+8(%rsp),%rbx negq %rsi movq 0(%rbp),%rax adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 sbbq %rsi,%rsi movl $8,%ecx jmp .L8x_tail .align 32 .L8x_tail_done: xorq %rax,%rax addq (%rdx),%r8 adcq $0,%r9 adcq $0,%r10 adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 adcq $0,%rax negq %rsi .L8x_no_tail: adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 adcq $0,%rax movq -8(%rbp),%rcx xorq %rsi,%rsi .byte 102,72,15,126,213 movq %r8,0(%rdi) movq %r9,8(%rdi) .byte 102,73,15,126,217 movq %r10,16(%rdi) movq %r11,24(%rdi) movq %r12,32(%rdi) movq %r13,40(%rdi) movq %r14,48(%rdi) movq %r15,56(%rdi) leaq 64(%rdi),%rdi cmpq %rdx,%rdi jb .L8x_reduction_loop .byte 0xf3,0xc3 .cfi_endproc .size bn_sqr8x_internal,.-bn_sqr8x_internal .type __bn_post4x_internal,@function .align 32 __bn_post4x_internal: .cfi_startproc movq 0(%rbp),%r12 leaq (%rdi,%r9,1),%rbx movq %r9,%rcx .byte 102,72,15,126,207 negq %rax .byte 102,72,15,126,206 sarq $3+2,%rcx decq %r12 xorq %r10,%r10 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqr4x_sub_entry .align 16 .Lsqr4x_sub: movq 0(%rbp),%r12 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 .Lsqr4x_sub_entry: leaq 32(%rbp),%rbp notq %r12 notq %r13 notq %r14 notq %r15 andq %rax,%r12 andq %rax,%r13 andq %rax,%r14 andq %rax,%r15 negq %r10 adcq 0(%rbx),%r12 adcq 8(%rbx),%r13 adcq 16(%rbx),%r14 adcq 24(%rbx),%r15 movq %r12,0(%rdi) leaq 32(%rbx),%rbx movq %r13,8(%rdi) sbbq %r10,%r10 movq %r14,16(%rdi) movq %r15,24(%rdi) leaq 32(%rdi),%rdi incq %rcx jnz .Lsqr4x_sub movq %r9,%r10 negq %r9 .byte 0xf3,0xc3 .cfi_endproc .size __bn_post4x_internal,.-__bn_post4x_internal #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .type bn_mulx4x_mont_gather5,@function .align 32 bn_mulx4x_mont_gather5: .cfi_startproc movq %rsp,%rax .cfi_def_cfa_register %rax .Lmulx4x_enter: pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lmulx4x_prologue: shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb .Lmulx4xsp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp .Lmulx4xsp_done .Lmulx4xsp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp .Lmulx4xsp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmulx4x_page_walk jmp .Lmulx4x_page_walk_done .Lmulx4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmulx4x_page_walk .Lmulx4x_page_walk_done: movq %r8,32(%rsp) movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 .Lmulx4x_body: call mulx4x_internal movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lmulx4x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mulx4x_mont_gather5,.-bn_mulx4x_mont_gather5 .type mulx4x_internal,@function .align 32 mulx4x_internal: .cfi_startproc 
movq %r9,8(%rsp) movq %r9,%r10 negq %r9 shlq $5,%r9 negq %r10 leaq 128(%rdx,%r9,1),%r13 shrq $5+5,%r9 movd 8(%rax),%xmm5 subq $1,%r9 leaq .Linc(%rip),%rax movq %r13,16+8(%rsp) movq %r9,24+8(%rsp) movq %rdi,56+8(%rsp) movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 88-112(%rsp,%r10,1),%r10 leaq 128(%rdx),%rdi pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 .byte 0x67 movdqa %xmm1,%xmm2 .byte 0x67 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,112(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,128(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,144(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,160(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,176(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,192(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,208(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,224(%r10) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,240(%r10) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,256(%r10) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,272(%r10) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,288(%r10) movdqa %xmm4,%xmm3 .byte 0x67 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,304(%r10) paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,320(%r10) pcmpeqd %xmm5,%xmm3 movdqa %xmm2,336(%r10) pand 64(%rdi),%xmm0 pand 80(%rdi),%xmm1 pand 96(%rdi),%xmm2 movdqa %xmm3,352(%r10) pand 112(%rdi),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -128(%rdi),%xmm4 movdqa -112(%rdi),%xmm5 movdqa -96(%rdi),%xmm2 pand 112(%r10),%xmm4 movdqa -80(%rdi),%xmm3 pand 128(%r10),%xmm5 por %xmm4,%xmm0 pand 144(%r10),%xmm2 por %xmm5,%xmm1 pand 160(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa -64(%rdi),%xmm4 movdqa -48(%rdi),%xmm5 movdqa -32(%rdi),%xmm2 pand 176(%r10),%xmm4 movdqa -16(%rdi),%xmm3 pand 192(%r10),%xmm5 por %xmm4,%xmm0 pand 208(%r10),%xmm2 por %xmm5,%xmm1 pand 224(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 movdqa 0(%rdi),%xmm4 movdqa 16(%rdi),%xmm5 movdqa 32(%rdi),%xmm2 pand 240(%r10),%xmm4 movdqa 48(%rdi),%xmm3 pand 256(%r10),%xmm5 por %xmm4,%xmm0 pand 272(%r10),%xmm2 por %xmm5,%xmm1 pand 288(%r10),%xmm3 por %xmm2,%xmm0 por %xmm3,%xmm1 pxor %xmm1,%xmm0 pshufd $0x4e,%xmm0,%xmm1 por %xmm1,%xmm0 leaq 256(%rdi),%rdi .byte 102,72,15,126,194 leaq 64+32+8(%rsp),%rbx movq %rdx,%r9 mulxq 0(%rsi),%r8,%rax mulxq 8(%rsi),%r11,%r12 addq %rax,%r11 mulxq 16(%rsi),%rax,%r13 adcq %rax,%r12 adcq $0,%r13 mulxq 24(%rsi),%rax,%r14 movq %r8,%r15 imulq 32+8(%rsp),%r8 xorq %rbp,%rbp movq %r8,%rdx movq %rdi,8+8(%rsp) leaq 32(%rsi),%rsi adcxq %rax,%r13 adcxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 movq 24+8(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) adcxq %rax,%r12 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r12,-16(%rbx) jmp .Lmulx4x_1st .align 32 .Lmulx4x_1st: adcxq %rbp,%r15 mulxq 0(%rsi),%r10,%rax adcxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 .byte 0x67,0x67 movq %r8,%rdx adcxq %rax,%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 
adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 movq %r11,-32(%rbx) adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz .Lmulx4x_1st movq 8(%rsp),%rax adcq %rbp,%r15 leaq (%rsi,%rax,1),%rsi addq %r15,%r14 movq 8+8(%rsp),%rdi adcq %rbp,%rbp movq %r14,-8(%rbx) jmp .Lmulx4x_outer .align 32 .Lmulx4x_outer: leaq 16-256(%rbx),%r10 pxor %xmm4,%xmm4 .byte 0x67,0x67 pxor %xmm5,%xmm5 movdqa -128(%rdi),%xmm0 movdqa -112(%rdi),%xmm1 movdqa -96(%rdi),%xmm2 pand 256(%r10),%xmm0 movdqa -80(%rdi),%xmm3 pand 272(%r10),%xmm1 por %xmm0,%xmm4 pand 288(%r10),%xmm2 por %xmm1,%xmm5 pand 304(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%rdi),%xmm0 movdqa -48(%rdi),%xmm1 movdqa -32(%rdi),%xmm2 pand 320(%r10),%xmm0 movdqa -16(%rdi),%xmm3 pand 336(%r10),%xmm1 por %xmm0,%xmm4 pand 352(%r10),%xmm2 por %xmm1,%xmm5 pand 368(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%rdi),%xmm0 movdqa 16(%rdi),%xmm1 movdqa 32(%rdi),%xmm2 pand 384(%r10),%xmm0 movdqa 48(%rdi),%xmm3 pand 400(%r10),%xmm1 por %xmm0,%xmm4 pand 416(%r10),%xmm2 por %xmm1,%xmm5 pand 432(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%rdi),%xmm0 movdqa 80(%rdi),%xmm1 movdqa 96(%rdi),%xmm2 pand 448(%r10),%xmm0 movdqa 112(%rdi),%xmm3 pand 464(%r10),%xmm1 por %xmm0,%xmm4 pand 480(%r10),%xmm2 por %xmm1,%xmm5 pand 496(%r10),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 leaq 256(%rdi),%rdi .byte 102,72,15,126,194 movq %rbp,(%rbx) leaq 32(%rbx,%rax,1),%rbx mulxq 0(%rsi),%r8,%r11 xorq %rbp,%rbp movq %rdx,%r9 mulxq 8(%rsi),%r14,%r12 adoxq -32(%rbx),%r8 adcxq %r14,%r11 mulxq 16(%rsi),%r15,%r13 adoxq -24(%rbx),%r11 adcxq %r15,%r12 mulxq 24(%rsi),%rdx,%r14 adoxq -16(%rbx),%r12 adcxq %rdx,%r13 leaq (%rcx,%rax,1),%rcx leaq 32(%rsi),%rsi adoxq -8(%rbx),%r13 adcxq %rbp,%r14 adoxq %rbp,%r14 movq %r8,%r15 imulq 32+8(%rsp),%r8 movq %r8,%rdx xorq %rbp,%rbp movq %rdi,8+8(%rsp) mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq 24+8(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r12 movq %r11,-24(%rbx) adoxq %rbp,%r15 movq %r12,-16(%rbx) leaq 32(%rcx),%rcx jmp .Lmulx4x_inner .align 32 .Lmulx4x_inner: mulxq 0(%rsi),%r10,%rax adcxq %rbp,%r15 adoxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq 0(%rbx),%r10 adoxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq 8(%rbx),%r11 adoxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 movq %r8,%rdx adcxq 16(%rbx),%r12 adoxq %rax,%r13 adcxq 24(%rbx),%r13 adoxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adcxq %rbp,%r14 adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 adoxq %r15,%r13 movq %r11,-32(%rbx) mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx leaq 32(%rcx),%rcx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 movq %r13,-16(%rbx) decq %rdi jnz .Lmulx4x_inner movq 0+8(%rsp),%rax adcq %rbp,%r15 subq 0(%rbx),%rdi movq 8+8(%rsp),%rdi movq 16+8(%rsp),%r10 adcq %r15,%r14 leaq (%rsi,%rax,1),%rsi adcq %rbp,%rbp movq %r14,-8(%rbx) cmpq %r10,%rdi jb .Lmulx4x_outer movq -8(%rcx),%r10 movq %rbp,%r8 movq (%rcx,%rax,1),%r12 leaq (%rcx,%rax,1),%rbp movq %rax,%rcx leaq (%rbx,%rax,1),%rdi xorl %eax,%eax xorq %r15,%r15 subq 
%r14,%r10 adcq %r15,%r15 orq %r15,%r8 sarq $3+2,%rcx subq %r8,%rax movq 56+8(%rsp),%rdx decq %r12 movq 8(%rbp),%r13 xorq %r8,%r8 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqrx4x_sub_entry .cfi_endproc .size mulx4x_internal,.-mulx4x_internal .type bn_powerx5,@function .align 32 bn_powerx5: .cfi_startproc movq %rsp,%rax .cfi_def_cfa_register %rax .Lpowerx5_enter: pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lpowerx5_prologue: shll $3,%r9d leaq (%r9,%r9,2),%r10 negq %r9 movq (%r8),%r8 leaq -320(%rsp,%r9,2),%r11 movq %rsp,%rbp subq %rdi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb .Lpwrx_sp_alt subq %r11,%rbp leaq -320(%rbp,%r9,2),%rbp jmp .Lpwrx_sp_done .align 32 .Lpwrx_sp_alt: leaq 4096-320(,%r9,2),%r10 leaq -320(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp .Lpwrx_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lpwrx_page_walk jmp .Lpwrx_page_walk_done .Lpwrx_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lpwrx_page_walk .Lpwrx_page_walk_done: movq %r9,%r10 negq %r9 pxor %xmm0,%xmm0 .byte 102,72,15,110,207 .byte 102,72,15,110,209 .byte 102,73,15,110,218 .byte 102,72,15,110,226 movq %r8,32(%rsp) movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 .Lpowerx5_body: call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal call __bn_sqrx8x_internal call __bn_postx4x_internal movq %r10,%r9 movq %rsi,%rdi .byte 102,72,15,126,209 .byte 102,72,15,126,226 movq 40(%rsp),%rax call mulx4x_internal movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpowerx5_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_powerx5,.-bn_powerx5 .globl bn_sqrx8x_internal .hidden bn_sqrx8x_internal .hidden bn_sqrx8x_internal .type bn_sqrx8x_internal,@function .align 32 bn_sqrx8x_internal: __bn_sqrx8x_internal: .cfi_startproc _CET_ENDBR leaq 48+8(%rsp),%rdi leaq (%rsi,%r9,1),%rbp movq %r9,0+8(%rsp) movq %rbp,8+8(%rsp) jmp .Lsqr8x_zero_start .align 32 .byte 0x66,0x66,0x66,0x2e,0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00 .Lsqrx8x_zero: .byte 0x3e movdqa %xmm0,0(%rdi) movdqa %xmm0,16(%rdi) movdqa %xmm0,32(%rdi) movdqa %xmm0,48(%rdi) .Lsqr8x_zero_start: movdqa %xmm0,64(%rdi) movdqa %xmm0,80(%rdi) movdqa %xmm0,96(%rdi) movdqa %xmm0,112(%rdi) leaq 128(%rdi),%rdi subq $64,%r9 jnz .Lsqrx8x_zero movq 0(%rsi),%rdx xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 leaq 48+8(%rsp),%rdi xorq %rbp,%rbp jmp .Lsqrx8x_outer_loop .align 32 .Lsqrx8x_outer_loop: mulxq 8(%rsi),%r8,%rax adcxq %r9,%r8 adoxq %rax,%r10 mulxq 16(%rsi),%r9,%rax adcxq %r10,%r9 adoxq %rax,%r11 .byte 0xc4,0xe2,0xab,0xf6,0x86,0x18,0x00,0x00,0x00 adcxq %r11,%r10 adoxq %rax,%r12 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x20,0x00,0x00,0x00 adcxq %r12,%r11 adoxq %rax,%r13 mulxq 40(%rsi),%r12,%rax adcxq %r13,%r12 adoxq %rax,%r14 mulxq 48(%rsi),%r13,%rax adcxq %r14,%r13 adoxq %r15,%rax mulxq 56(%rsi),%r14,%r15 movq 8(%rsi),%rdx adcxq %rax,%r14 
adoxq %rbp,%r15 adcq 64(%rdi),%r15 movq %r8,8(%rdi) movq %r9,16(%rdi) sbbq %rcx,%rcx xorq %rbp,%rbp mulxq 16(%rsi),%r8,%rbx mulxq 24(%rsi),%r9,%rax adcxq %r10,%r8 adoxq %rbx,%r9 mulxq 32(%rsi),%r10,%rbx adcxq %r11,%r9 adoxq %rax,%r10 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x28,0x00,0x00,0x00 adcxq %r12,%r10 adoxq %rbx,%r11 .byte 0xc4,0xe2,0x9b,0xf6,0x9e,0x30,0x00,0x00,0x00 adcxq %r13,%r11 adoxq %r14,%r12 .byte 0xc4,0x62,0x93,0xf6,0xb6,0x38,0x00,0x00,0x00 movq 16(%rsi),%rdx adcxq %rax,%r12 adoxq %rbx,%r13 adcxq %r15,%r13 adoxq %rbp,%r14 adcxq %rbp,%r14 movq %r8,24(%rdi) movq %r9,32(%rdi) mulxq 24(%rsi),%r8,%rbx mulxq 32(%rsi),%r9,%rax adcxq %r10,%r8 adoxq %rbx,%r9 mulxq 40(%rsi),%r10,%rbx adcxq %r11,%r9 adoxq %rax,%r10 .byte 0xc4,0xe2,0xa3,0xf6,0x86,0x30,0x00,0x00,0x00 adcxq %r12,%r10 adoxq %r13,%r11 .byte 0xc4,0x62,0x9b,0xf6,0xae,0x38,0x00,0x00,0x00 .byte 0x3e movq 24(%rsi),%rdx adcxq %rbx,%r11 adoxq %rax,%r12 adcxq %r14,%r12 movq %r8,40(%rdi) movq %r9,48(%rdi) mulxq 32(%rsi),%r8,%rax adoxq %rbp,%r13 adcxq %rbp,%r13 mulxq 40(%rsi),%r9,%rbx adcxq %r10,%r8 adoxq %rax,%r9 mulxq 48(%rsi),%r10,%rax adcxq %r11,%r9 adoxq %r12,%r10 mulxq 56(%rsi),%r11,%r12 movq 32(%rsi),%rdx movq 40(%rsi),%r14 adcxq %rbx,%r10 adoxq %rax,%r11 movq 48(%rsi),%r15 adcxq %r13,%r11 adoxq %rbp,%r12 adcxq %rbp,%r12 movq %r8,56(%rdi) movq %r9,64(%rdi) mulxq %r14,%r9,%rax movq 56(%rsi),%r8 adcxq %r10,%r9 mulxq %r15,%r10,%rbx adoxq %rax,%r10 adcxq %r11,%r10 mulxq %r8,%r11,%rax movq %r14,%rdx adoxq %rbx,%r11 adcxq %r12,%r11 adcxq %rbp,%rax mulxq %r15,%r14,%rbx mulxq %r8,%r12,%r13 movq %r15,%rdx leaq 64(%rsi),%rsi adcxq %r14,%r11 adoxq %rbx,%r12 adcxq %rax,%r12 adoxq %rbp,%r13 .byte 0x67,0x67 mulxq %r8,%r8,%r14 adcxq %r8,%r13 adcxq %rbp,%r14 cmpq 8+8(%rsp),%rsi je .Lsqrx8x_outer_break negq %rcx movq $-8,%rcx movq %rbp,%r15 movq 64(%rdi),%r8 adcxq 72(%rdi),%r9 adcxq 80(%rdi),%r10 adcxq 88(%rdi),%r11 adcq 96(%rdi),%r12 adcq 104(%rdi),%r13 adcq 112(%rdi),%r14 adcq 120(%rdi),%r15 leaq (%rsi),%rbp leaq 128(%rdi),%rdi sbbq %rax,%rax movq -64(%rsi),%rdx movq %rax,16+8(%rsp) movq %rdi,24+8(%rsp) xorl %eax,%eax jmp .Lsqrx8x_loop .align 32 .Lsqrx8x_loop: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rax,%rbx adoxq %r9,%r8 mulxq 8(%rbp),%rax,%r9 adcxq %rax,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rax,%r10 adcxq %rax,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00 adcxq %rax,%r11 adoxq %r13,%r12 mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 movq %rbx,(%rdi,%rcx,8) movl $0,%ebx adcxq %rax,%r13 adoxq %r15,%r14 .byte 0xc4,0x62,0xfb,0xf6,0xbd,0x38,0x00,0x00,0x00 movq 8(%rsi,%rcx,8),%rdx adcxq %rax,%r14 adoxq %rbx,%r15 adcxq %rbx,%r15 .byte 0x67 incq %rcx jnz .Lsqrx8x_loop leaq 64(%rbp),%rbp movq $-8,%rcx cmpq 8+8(%rsp),%rbp je .Lsqrx8x_break subq 16+8(%rsp),%rbx .byte 0x66 movq -64(%rsi),%rdx adcxq 0(%rdi),%r8 adcxq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi .byte 0x67 sbbq %rax,%rax xorl %ebx,%ebx movq %rax,16+8(%rsp) jmp .Lsqrx8x_loop .align 32 .Lsqrx8x_break: xorq %rbp,%rbp subq 16+8(%rsp),%rbx adcxq %rbp,%r8 movq 24+8(%rsp),%rcx adcxq %rbp,%r9 movq 0(%rsi),%rdx adcq $0,%r10 movq %r8,0(%rdi) adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 cmpq %rcx,%rdi je .Lsqrx8x_outer_loop movq %r9,8(%rdi) movq 8(%rcx),%r9 movq %r10,16(%rdi) movq 16(%rcx),%r10 movq %r11,24(%rdi) movq 24(%rcx),%r11 movq %r12,32(%rdi) movq 32(%rcx),%r12 
movq %r13,40(%rdi) movq 40(%rcx),%r13 movq %r14,48(%rdi) movq 48(%rcx),%r14 movq %r15,56(%rdi) movq 56(%rcx),%r15 movq %rcx,%rdi jmp .Lsqrx8x_outer_loop .align 32 .Lsqrx8x_outer_break: movq %r9,72(%rdi) .byte 102,72,15,126,217 movq %r10,80(%rdi) movq %r11,88(%rdi) movq %r12,96(%rdi) movq %r13,104(%rdi) movq %r14,112(%rdi) leaq 48+8(%rsp),%rdi movq (%rsi,%rcx,1),%rdx movq 8(%rdi),%r11 xorq %r10,%r10 movq 0+8(%rsp),%r9 adoxq %r11,%r11 movq 16(%rdi),%r12 movq 24(%rdi),%r13 .align 32 .Lsqrx4x_shift_n_add: mulxq %rdx,%rax,%rbx adoxq %r12,%r12 adcxq %r10,%rax .byte 0x48,0x8b,0x94,0x0e,0x08,0x00,0x00,0x00 .byte 0x4c,0x8b,0x97,0x20,0x00,0x00,0x00 adoxq %r13,%r13 adcxq %r11,%rbx movq 40(%rdi),%r11 movq %rax,0(%rdi) movq %rbx,8(%rdi) mulxq %rdx,%rax,%rbx adoxq %r10,%r10 adcxq %r12,%rax movq 16(%rsi,%rcx,1),%rdx movq 48(%rdi),%r12 adoxq %r11,%r11 adcxq %r13,%rbx movq 56(%rdi),%r13 movq %rax,16(%rdi) movq %rbx,24(%rdi) mulxq %rdx,%rax,%rbx adoxq %r12,%r12 adcxq %r10,%rax movq 24(%rsi,%rcx,1),%rdx leaq 32(%rcx),%rcx movq 64(%rdi),%r10 adoxq %r13,%r13 adcxq %r11,%rbx movq 72(%rdi),%r11 movq %rax,32(%rdi) movq %rbx,40(%rdi) mulxq %rdx,%rax,%rbx adoxq %r10,%r10 adcxq %r12,%rax jrcxz .Lsqrx4x_shift_n_add_break .byte 0x48,0x8b,0x94,0x0e,0x00,0x00,0x00,0x00 adoxq %r11,%r11 adcxq %r13,%rbx movq 80(%rdi),%r12 movq 88(%rdi),%r13 movq %rax,48(%rdi) movq %rbx,56(%rdi) leaq 64(%rdi),%rdi nop jmp .Lsqrx4x_shift_n_add .align 32 .Lsqrx4x_shift_n_add_break: adcxq %r13,%rbx movq %rax,48(%rdi) movq %rbx,56(%rdi) leaq 64(%rdi),%rdi .byte 102,72,15,126,213 __bn_sqrx8x_reduction: xorl %eax,%eax movq 32+8(%rsp),%rbx movq 48+8(%rsp),%rdx leaq -64(%rbp,%r9,1),%rcx movq %rcx,0+8(%rsp) movq %rdi,8+8(%rsp) leaq 48+8(%rsp),%rdi jmp .Lsqrx8x_reduction_loop .align 32 .Lsqrx8x_reduction_loop: movq 8(%rdi),%r9 movq 16(%rdi),%r10 movq 24(%rdi),%r11 movq 32(%rdi),%r12 movq %rdx,%r8 imulq %rbx,%rdx movq 40(%rdi),%r13 movq 48(%rdi),%r14 movq 56(%rdi),%r15 movq %rax,24+8(%rsp) leaq 64(%rdi),%rdi xorq %rsi,%rsi movq $-8,%rcx jmp .Lsqrx8x_reduce .align 32 .Lsqrx8x_reduce: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rbx,%rax adoxq %r9,%r8 mulxq 8(%rbp),%rbx,%r9 adcxq %rbx,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rbx,%r10 adcxq %rbx,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rbx,%r11 adcxq %rbx,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xe3,0xf6,0xa5,0x20,0x00,0x00,0x00 movq %rdx,%rax movq %r8,%rdx adcxq %rbx,%r11 adoxq %r13,%r12 mulxq 32+8(%rsp),%rbx,%rdx movq %rax,%rdx movq %rax,64+48+8(%rsp,%rcx,8) mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 adcxq %rax,%r13 adoxq %r15,%r14 mulxq 56(%rbp),%rax,%r15 movq %rbx,%rdx adcxq %rax,%r14 adoxq %rsi,%r15 adcxq %rsi,%r15 .byte 0x67,0x67,0x67 incq %rcx jnz .Lsqrx8x_reduce movq %rsi,%rax cmpq 0+8(%rsp),%rbp jae .Lsqrx8x_no_tail movq 48+8(%rsp),%rdx addq 0(%rdi),%r8 leaq 64(%rbp),%rbp movq $-8,%rcx adcxq 8(%rdi),%r9 adcxq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi sbbq %rax,%rax xorq %rsi,%rsi movq %rax,16+8(%rsp) jmp .Lsqrx8x_tail .align 32 .Lsqrx8x_tail: movq %r8,%rbx mulxq 0(%rbp),%rax,%r8 adcxq %rax,%rbx adoxq %r9,%r8 mulxq 8(%rbp),%rax,%r9 adcxq %rax,%r8 adoxq %r10,%r9 mulxq 16(%rbp),%rax,%r10 adcxq %rax,%r9 adoxq %r11,%r10 mulxq 24(%rbp),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00 adcxq %rax,%r11 adoxq %r13,%r12 mulxq 40(%rbp),%rax,%r13 adcxq %rax,%r12 adoxq %r14,%r13 mulxq 48(%rbp),%rax,%r14 adcxq %rax,%r13 adoxq %r15,%r14 mulxq 
56(%rbp),%rax,%r15 movq 72+48+8(%rsp,%rcx,8),%rdx adcxq %rax,%r14 adoxq %rsi,%r15 movq %rbx,(%rdi,%rcx,8) movq %r8,%rbx adcxq %rsi,%r15 incq %rcx jnz .Lsqrx8x_tail cmpq 0+8(%rsp),%rbp jae .Lsqrx8x_tail_done subq 16+8(%rsp),%rsi movq 48+8(%rsp),%rdx leaq 64(%rbp),%rbp adcq 0(%rdi),%r8 adcq 8(%rdi),%r9 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 leaq 64(%rdi),%rdi sbbq %rax,%rax subq $8,%rcx xorq %rsi,%rsi movq %rax,16+8(%rsp) jmp .Lsqrx8x_tail .align 32 .Lsqrx8x_tail_done: xorq %rax,%rax addq 24+8(%rsp),%r8 adcq $0,%r9 adcq $0,%r10 adcq $0,%r11 adcq $0,%r12 adcq $0,%r13 adcq $0,%r14 adcq $0,%r15 adcq $0,%rax subq 16+8(%rsp),%rsi .Lsqrx8x_no_tail: adcq 0(%rdi),%r8 .byte 102,72,15,126,217 adcq 8(%rdi),%r9 movq 56(%rbp),%rsi .byte 102,72,15,126,213 adcq 16(%rdi),%r10 adcq 24(%rdi),%r11 adcq 32(%rdi),%r12 adcq 40(%rdi),%r13 adcq 48(%rdi),%r14 adcq 56(%rdi),%r15 adcq $0,%rax movq 32+8(%rsp),%rbx movq 64(%rdi,%rcx,1),%rdx movq %r8,0(%rdi) leaq 64(%rdi),%r8 movq %r9,8(%rdi) movq %r10,16(%rdi) movq %r11,24(%rdi) movq %r12,32(%rdi) movq %r13,40(%rdi) movq %r14,48(%rdi) movq %r15,56(%rdi) leaq 64(%rdi,%rcx,1),%rdi cmpq 8+8(%rsp),%r8 jb .Lsqrx8x_reduction_loop .byte 0xf3,0xc3 .cfi_endproc .size bn_sqrx8x_internal,.-bn_sqrx8x_internal .align 32 .type __bn_postx4x_internal,@function __bn_postx4x_internal: .cfi_startproc movq 0(%rbp),%r12 movq %rcx,%r10 movq %rcx,%r9 negq %rax sarq $3+2,%rcx .byte 102,72,15,126,202 .byte 102,72,15,126,206 decq %r12 movq 8(%rbp),%r13 xorq %r8,%r8 movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqrx4x_sub_entry .align 16 .Lsqrx4x_sub: movq 0(%rbp),%r12 movq 8(%rbp),%r13 movq 16(%rbp),%r14 movq 24(%rbp),%r15 .Lsqrx4x_sub_entry: andnq %rax,%r12,%r12 leaq 32(%rbp),%rbp andnq %rax,%r13,%r13 andnq %rax,%r14,%r14 andnq %rax,%r15,%r15 negq %r8 adcq 0(%rdi),%r12 adcq 8(%rdi),%r13 adcq 16(%rdi),%r14 adcq 24(%rdi),%r15 movq %r12,0(%rdx) leaq 32(%rdi),%rdi movq %r13,8(%rdx) sbbq %r8,%r8 movq %r14,16(%rdx) movq %r15,24(%rdx) leaq 32(%rdx),%rdx incq %rcx jnz .Lsqrx4x_sub negq %r9 .byte 0xf3,0xc3 .cfi_endproc .size __bn_postx4x_internal,.-__bn_postx4x_internal #endif .globl bn_scatter5 .hidden bn_scatter5 .type bn_scatter5,@function .align 16 bn_scatter5: .cfi_startproc _CET_ENDBR cmpl $0,%esi jz .Lscatter_epilogue leaq (%rdx,%rcx,8),%rdx .Lscatter: movq (%rdi),%rax leaq 8(%rdi),%rdi movq %rax,(%rdx) leaq 256(%rdx),%rdx subl $1,%esi jnz .Lscatter .Lscatter_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_scatter5,.-bn_scatter5 .globl bn_gather5 .hidden bn_gather5 .type bn_gather5,@function .align 32 bn_gather5: .cfi_startproc .LSEH_begin_bn_gather5: _CET_ENDBR .byte 0x4c,0x8d,0x14,0x24 .cfi_def_cfa_register %r10 .byte 0x48,0x81,0xec,0x08,0x01,0x00,0x00 leaq .Linc(%rip),%rax andq $-16,%rsp movd %ecx,%xmm5 movdqa 0(%rax),%xmm0 movdqa 16(%rax),%xmm1 leaq 128(%rdx),%r11 leaq 128(%rsp),%rax pshufd $0,%xmm5,%xmm5 movdqa %xmm1,%xmm4 movdqa %xmm1,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,-128(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,-112(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,-96(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,-80(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,-64(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,-48(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa 
%xmm2,-32(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,-16(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,0(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,16(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,32(%rax) movdqa %xmm4,%xmm2 paddd %xmm0,%xmm1 pcmpeqd %xmm5,%xmm0 movdqa %xmm3,48(%rax) movdqa %xmm4,%xmm3 paddd %xmm1,%xmm2 pcmpeqd %xmm5,%xmm1 movdqa %xmm0,64(%rax) movdqa %xmm4,%xmm0 paddd %xmm2,%xmm3 pcmpeqd %xmm5,%xmm2 movdqa %xmm1,80(%rax) movdqa %xmm4,%xmm1 paddd %xmm3,%xmm0 pcmpeqd %xmm5,%xmm3 movdqa %xmm2,96(%rax) movdqa %xmm4,%xmm2 movdqa %xmm3,112(%rax) jmp .Lgather .align 32 .Lgather: pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa -128(%r11),%xmm0 movdqa -112(%r11),%xmm1 movdqa -96(%r11),%xmm2 pand -128(%rax),%xmm0 movdqa -80(%r11),%xmm3 pand -112(%rax),%xmm1 por %xmm0,%xmm4 pand -96(%rax),%xmm2 por %xmm1,%xmm5 pand -80(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa -64(%r11),%xmm0 movdqa -48(%r11),%xmm1 movdqa -32(%r11),%xmm2 pand -64(%rax),%xmm0 movdqa -16(%r11),%xmm3 pand -48(%rax),%xmm1 por %xmm0,%xmm4 pand -32(%rax),%xmm2 por %xmm1,%xmm5 pand -16(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 0(%r11),%xmm0 movdqa 16(%r11),%xmm1 movdqa 32(%r11),%xmm2 pand 0(%rax),%xmm0 movdqa 48(%r11),%xmm3 pand 16(%rax),%xmm1 por %xmm0,%xmm4 pand 32(%rax),%xmm2 por %xmm1,%xmm5 pand 48(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqa 64(%r11),%xmm0 movdqa 80(%r11),%xmm1 movdqa 96(%r11),%xmm2 pand 64(%rax),%xmm0 movdqa 112(%r11),%xmm3 pand 80(%rax),%xmm1 por %xmm0,%xmm4 pand 96(%rax),%xmm2 por %xmm1,%xmm5 pand 112(%rax),%xmm3 por %xmm2,%xmm4 por %xmm3,%xmm5 por %xmm5,%xmm4 leaq 256(%r11),%r11 pshufd $0x4e,%xmm4,%xmm0 por %xmm4,%xmm0 movq %xmm0,(%rdi) leaq 8(%rdi),%rdi subl $1,%esi jnz .Lgather leaq (%r10),%rsp .cfi_def_cfa_register %rsp .byte 0xf3,0xc3 .LSEH_end_bn_gather5: .cfi_endproc .size bn_gather5,.-bn_gather5 .section .rodata .align 64 .Linc: .long 0,0, 1,1 .long 2,2, 2,2 .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,119,105,116,104,32,115,99,97,116,116,101,114,47,103,97,116,104,101,114,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text #endif
marvin-hansen/iggy-streaming-system
39,466
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/rsaz-avx2.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl rsaz_1024_sqr_avx2 .hidden rsaz_1024_sqr_avx2 .type rsaz_1024_sqr_avx2,@function .align 64 rsaz_1024_sqr_avx2: .cfi_startproc _CET_ENDBR leaq (%rsp),%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 vzeroupper movq %rax,%rbp .cfi_def_cfa_register %rbp movq %rdx,%r13 subq $832,%rsp movq %r13,%r15 subq $-128,%rdi subq $-128,%rsi subq $-128,%r13 andq $4095,%r15 addq $320,%r15 shrq $12,%r15 vpxor %ymm9,%ymm9,%ymm9 jz .Lsqr_1024_no_n_copy subq $320,%rsp vmovdqu 0-128(%r13),%ymm0 andq $-2048,%rsp vmovdqu 32-128(%r13),%ymm1 vmovdqu 64-128(%r13),%ymm2 vmovdqu 96-128(%r13),%ymm3 vmovdqu 128-128(%r13),%ymm4 vmovdqu 160-128(%r13),%ymm5 vmovdqu 192-128(%r13),%ymm6 vmovdqu 224-128(%r13),%ymm7 vmovdqu 256-128(%r13),%ymm8 leaq 832+128(%rsp),%r13 vmovdqu %ymm0,0-128(%r13) vmovdqu %ymm1,32-128(%r13) vmovdqu %ymm2,64-128(%r13) vmovdqu %ymm3,96-128(%r13) vmovdqu %ymm4,128-128(%r13) vmovdqu %ymm5,160-128(%r13) vmovdqu %ymm6,192-128(%r13) vmovdqu %ymm7,224-128(%r13) vmovdqu %ymm8,256-128(%r13) vmovdqu %ymm9,288-128(%r13) .Lsqr_1024_no_n_copy: andq $-1024,%rsp vmovdqu 32-128(%rsi),%ymm1 vmovdqu 64-128(%rsi),%ymm2 vmovdqu 96-128(%rsi),%ymm3 vmovdqu 128-128(%rsi),%ymm4 vmovdqu 160-128(%rsi),%ymm5 vmovdqu 192-128(%rsi),%ymm6 vmovdqu 224-128(%rsi),%ymm7 vmovdqu 256-128(%rsi),%ymm8 leaq 192(%rsp),%rbx vmovdqu .Land_mask(%rip),%ymm15 jmp .LOOP_GRANDE_SQR_1024 .align 32 .LOOP_GRANDE_SQR_1024: leaq 576+128(%rsp),%r9 leaq 448(%rsp),%r12 vpaddq %ymm1,%ymm1,%ymm1 vpbroadcastq 0-128(%rsi),%ymm10 vpaddq %ymm2,%ymm2,%ymm2 vmovdqa %ymm1,0-128(%r9) vpaddq %ymm3,%ymm3,%ymm3 vmovdqa %ymm2,32-128(%r9) vpaddq %ymm4,%ymm4,%ymm4 vmovdqa %ymm3,64-128(%r9) vpaddq %ymm5,%ymm5,%ymm5 vmovdqa %ymm4,96-128(%r9) vpaddq %ymm6,%ymm6,%ymm6 vmovdqa %ymm5,128-128(%r9) vpaddq %ymm7,%ymm7,%ymm7 vmovdqa %ymm6,160-128(%r9) vpaddq %ymm8,%ymm8,%ymm8 vmovdqa %ymm7,192-128(%r9) vpxor %ymm9,%ymm9,%ymm9 vmovdqa %ymm8,224-128(%r9) vpmuludq 0-128(%rsi),%ymm10,%ymm0 vpbroadcastq 32-128(%rsi),%ymm11 vmovdqu %ymm9,288-192(%rbx) vpmuludq %ymm10,%ymm1,%ymm1 vmovdqu %ymm9,320-448(%r12) vpmuludq %ymm10,%ymm2,%ymm2 vmovdqu %ymm9,352-448(%r12) vpmuludq %ymm10,%ymm3,%ymm3 vmovdqu %ymm9,384-448(%r12) vpmuludq %ymm10,%ymm4,%ymm4 vmovdqu %ymm9,416-448(%r12) vpmuludq %ymm10,%ymm5,%ymm5 vmovdqu %ymm9,448-448(%r12) vpmuludq %ymm10,%ymm6,%ymm6 vmovdqu %ymm9,480-448(%r12) vpmuludq %ymm10,%ymm7,%ymm7 vmovdqu %ymm9,512-448(%r12) vpmuludq %ymm10,%ymm8,%ymm8 vpbroadcastq 64-128(%rsi),%ymm10 vmovdqu %ymm9,544-448(%r12) movq %rsi,%r15 movl $4,%r14d jmp .Lsqr_entry_1024 .align 32 .LOOP_SQR_1024: vpbroadcastq 32-128(%r15),%ymm11 vpmuludq 0-128(%rsi),%ymm10,%ymm0 vpaddq 0-192(%rbx),%ymm0,%ymm0 vpmuludq 0-128(%r9),%ymm10,%ymm1 vpaddq 32-192(%rbx),%ymm1,%ymm1 vpmuludq 32-128(%r9),%ymm10,%ymm2 vpaddq 64-192(%rbx),%ymm2,%ymm2 vpmuludq 64-128(%r9),%ymm10,%ymm3 vpaddq 96-192(%rbx),%ymm3,%ymm3 vpmuludq 96-128(%r9),%ymm10,%ymm4 vpaddq 128-192(%rbx),%ymm4,%ymm4 vpmuludq 128-128(%r9),%ymm10,%ymm5 vpaddq 160-192(%rbx),%ymm5,%ymm5 vpmuludq 160-128(%r9),%ymm10,%ymm6 vpaddq 192-192(%rbx),%ymm6,%ymm6 vpmuludq 192-128(%r9),%ymm10,%ymm7 vpaddq 224-192(%rbx),%ymm7,%ymm7 vpmuludq 
224-128(%r9),%ymm10,%ymm8 vpbroadcastq 64-128(%r15),%ymm10 vpaddq 256-192(%rbx),%ymm8,%ymm8 .Lsqr_entry_1024: vmovdqu %ymm0,0-192(%rbx) vmovdqu %ymm1,32-192(%rbx) vpmuludq 32-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 32-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm3,%ymm3 vpmuludq 64-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 96-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 128-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq 160-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 192-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 224-128(%r9),%ymm11,%ymm0 vpbroadcastq 96-128(%r15),%ymm11 vpaddq 288-192(%rbx),%ymm0,%ymm0 vmovdqu %ymm2,64-192(%rbx) vmovdqu %ymm3,96-192(%rbx) vpmuludq 64-128(%rsi),%ymm10,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 64-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 96-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq 128-128(%r9),%ymm10,%ymm13 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 160-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 192-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm0,%ymm0 vpmuludq 224-128(%r9),%ymm10,%ymm1 vpbroadcastq 128-128(%r15),%ymm10 vpaddq 320-448(%r12),%ymm1,%ymm1 vmovdqu %ymm4,128-192(%rbx) vmovdqu %ymm5,160-192(%rbx) vpmuludq 96-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm6,%ymm6 vpmuludq 96-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm7,%ymm7 vpmuludq 128-128(%r9),%ymm11,%ymm13 vpaddq %ymm13,%ymm8,%ymm8 vpmuludq 160-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm0,%ymm0 vpmuludq 192-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm1,%ymm1 vpmuludq 224-128(%r9),%ymm11,%ymm2 vpbroadcastq 160-128(%r15),%ymm11 vpaddq 352-448(%r12),%ymm2,%ymm2 vmovdqu %ymm6,192-192(%rbx) vmovdqu %ymm7,224-192(%rbx) vpmuludq 128-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq 128-128(%r9),%ymm10,%ymm14 vpaddq %ymm14,%ymm0,%ymm0 vpmuludq 160-128(%r9),%ymm10,%ymm13 vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 192-128(%r9),%ymm10,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 224-128(%r9),%ymm10,%ymm3 vpbroadcastq 192-128(%r15),%ymm10 vpaddq 384-448(%r12),%ymm3,%ymm3 vmovdqu %ymm8,256-192(%rbx) vmovdqu %ymm0,288-192(%rbx) leaq 8(%rbx),%rbx vpmuludq 160-128(%rsi),%ymm11,%ymm13 vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 160-128(%r9),%ymm11,%ymm12 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 192-128(%r9),%ymm11,%ymm14 vpaddq %ymm14,%ymm3,%ymm3 vpmuludq 224-128(%r9),%ymm11,%ymm4 vpbroadcastq 224-128(%r15),%ymm11 vpaddq 416-448(%r12),%ymm4,%ymm4 vmovdqu %ymm1,320-448(%r12) vmovdqu %ymm2,352-448(%r12) vpmuludq 192-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm3,%ymm3 vpmuludq 192-128(%r9),%ymm10,%ymm14 vpbroadcastq 256-128(%r15),%ymm0 vpaddq %ymm14,%ymm4,%ymm4 vpmuludq 224-128(%r9),%ymm10,%ymm5 vpbroadcastq 0+8-128(%r15),%ymm10 vpaddq 448-448(%r12),%ymm5,%ymm5 vmovdqu %ymm3,384-448(%r12) vmovdqu %ymm4,416-448(%r12) leaq 8(%r15),%r15 vpmuludq 224-128(%rsi),%ymm11,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 224-128(%r9),%ymm11,%ymm6 vpaddq 480-448(%r12),%ymm6,%ymm6 vpmuludq 256-128(%rsi),%ymm0,%ymm7 vmovdqu %ymm5,448-448(%r12) vpaddq 512-448(%r12),%ymm7,%ymm7 vmovdqu %ymm6,480-448(%r12) vmovdqu %ymm7,512-448(%r12) leaq 8(%r12),%r12 decl %r14d jnz .LOOP_SQR_1024 vmovdqu 256(%rsp),%ymm8 vmovdqu 288(%rsp),%ymm1 vmovdqu 320(%rsp),%ymm2 leaq 192(%rsp),%rbx vpsrlq $29,%ymm8,%ymm14 vpand %ymm15,%ymm8,%ymm8 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpermq $0x93,%ymm14,%ymm14 vpxor %ymm9,%ymm9,%ymm9 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm9,%ymm14,%ymm10 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm8,%ymm8 vpblendd 
$3,%ymm11,%ymm9,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vpaddq %ymm11,%ymm2,%ymm2 vmovdqu %ymm1,288-192(%rbx) vmovdqu %ymm2,320-192(%rbx) movq (%rsp),%rax movq 8(%rsp),%r10 movq 16(%rsp),%r11 movq 24(%rsp),%r12 vmovdqu 32(%rsp),%ymm1 vmovdqu 64-192(%rbx),%ymm2 vmovdqu 96-192(%rbx),%ymm3 vmovdqu 128-192(%rbx),%ymm4 vmovdqu 160-192(%rbx),%ymm5 vmovdqu 192-192(%rbx),%ymm6 vmovdqu 224-192(%rbx),%ymm7 movq %rax,%r9 imull %ecx,%eax andl $0x1fffffff,%eax vmovd %eax,%xmm12 movq %rax,%rdx imulq -128(%r13),%rax vpbroadcastq %xmm12,%ymm12 addq %rax,%r9 movq %rdx,%rax imulq 8-128(%r13),%rax shrq $29,%r9 addq %rax,%r10 movq %rdx,%rax imulq 16-128(%r13),%rax addq %r9,%r10 addq %rax,%r11 imulq 24-128(%r13),%rdx addq %rdx,%r12 movq %r10,%rax imull %ecx,%eax andl $0x1fffffff,%eax movl $9,%r14d jmp .LOOP_REDUCE_1024 .align 32 .LOOP_REDUCE_1024: vmovd %eax,%xmm13 vpbroadcastq %xmm13,%ymm13 vpmuludq 32-128(%r13),%ymm12,%ymm10 movq %rax,%rdx imulq -128(%r13),%rax vpaddq %ymm10,%ymm1,%ymm1 addq %rax,%r10 vpmuludq 64-128(%r13),%ymm12,%ymm14 movq %rdx,%rax imulq 8-128(%r13),%rax vpaddq %ymm14,%ymm2,%ymm2 vpmuludq 96-128(%r13),%ymm12,%ymm11 .byte 0x67 addq %rax,%r11 .byte 0x67 movq %rdx,%rax imulq 16-128(%r13),%rax shrq $29,%r10 vpaddq %ymm11,%ymm3,%ymm3 vpmuludq 128-128(%r13),%ymm12,%ymm10 addq %rax,%r12 addq %r10,%r11 vpaddq %ymm10,%ymm4,%ymm4 vpmuludq 160-128(%r13),%ymm12,%ymm14 movq %r11,%rax imull %ecx,%eax vpaddq %ymm14,%ymm5,%ymm5 vpmuludq 192-128(%r13),%ymm12,%ymm11 andl $0x1fffffff,%eax vpaddq %ymm11,%ymm6,%ymm6 vpmuludq 224-128(%r13),%ymm12,%ymm10 vpaddq %ymm10,%ymm7,%ymm7 vpmuludq 256-128(%r13),%ymm12,%ymm14 vmovd %eax,%xmm12 vpaddq %ymm14,%ymm8,%ymm8 vpbroadcastq %xmm12,%ymm12 vpmuludq 32-8-128(%r13),%ymm13,%ymm11 vmovdqu 96-8-128(%r13),%ymm14 movq %rax,%rdx imulq -128(%r13),%rax vpaddq %ymm11,%ymm1,%ymm1 vpmuludq 64-8-128(%r13),%ymm13,%ymm10 vmovdqu 128-8-128(%r13),%ymm11 addq %rax,%r11 movq %rdx,%rax imulq 8-128(%r13),%rax vpaddq %ymm10,%ymm2,%ymm2 addq %r12,%rax shrq $29,%r11 vpmuludq %ymm13,%ymm14,%ymm14 vmovdqu 160-8-128(%r13),%ymm10 addq %r11,%rax vpaddq %ymm14,%ymm3,%ymm3 vpmuludq %ymm13,%ymm11,%ymm11 vmovdqu 192-8-128(%r13),%ymm14 .byte 0x67 movq %rax,%r12 imull %ecx,%eax vpaddq %ymm11,%ymm4,%ymm4 vpmuludq %ymm13,%ymm10,%ymm10 .byte 0xc4,0x41,0x7e,0x6f,0x9d,0x58,0x00,0x00,0x00 andl $0x1fffffff,%eax vpaddq %ymm10,%ymm5,%ymm5 vpmuludq %ymm13,%ymm14,%ymm14 vmovdqu 256-8-128(%r13),%ymm10 vpaddq %ymm14,%ymm6,%ymm6 vpmuludq %ymm13,%ymm11,%ymm11 vmovdqu 288-8-128(%r13),%ymm9 vmovd %eax,%xmm0 imulq -128(%r13),%rax vpaddq %ymm11,%ymm7,%ymm7 vpmuludq %ymm13,%ymm10,%ymm10 vmovdqu 32-16-128(%r13),%ymm14 vpbroadcastq %xmm0,%ymm0 vpaddq %ymm10,%ymm8,%ymm8 vpmuludq %ymm13,%ymm9,%ymm9 vmovdqu 64-16-128(%r13),%ymm11 addq %rax,%r12 vmovdqu 32-24-128(%r13),%ymm13 vpmuludq %ymm12,%ymm14,%ymm14 vmovdqu 96-16-128(%r13),%ymm10 vpaddq %ymm14,%ymm1,%ymm1 vpmuludq %ymm0,%ymm13,%ymm13 vpmuludq %ymm12,%ymm11,%ymm11 .byte 0xc4,0x41,0x7e,0x6f,0xb5,0xf0,0xff,0xff,0xff vpaddq %ymm1,%ymm13,%ymm13 vpaddq %ymm11,%ymm2,%ymm2 vpmuludq %ymm12,%ymm10,%ymm10 vmovdqu 160-16-128(%r13),%ymm11 .byte 0x67 vmovq %xmm13,%rax vmovdqu %ymm13,(%rsp) vpaddq %ymm10,%ymm3,%ymm3 vpmuludq %ymm12,%ymm14,%ymm14 vmovdqu 192-16-128(%r13),%ymm10 vpaddq %ymm14,%ymm4,%ymm4 vpmuludq %ymm12,%ymm11,%ymm11 vmovdqu 224-16-128(%r13),%ymm14 vpaddq %ymm11,%ymm5,%ymm5 vpmuludq %ymm12,%ymm10,%ymm10 vmovdqu 256-16-128(%r13),%ymm11 vpaddq %ymm10,%ymm6,%ymm6 vpmuludq %ymm12,%ymm14,%ymm14 shrq $29,%r12 vmovdqu 288-16-128(%r13),%ymm10 addq %r12,%rax vpaddq %ymm14,%ymm7,%ymm7 
vpmuludq %ymm12,%ymm11,%ymm11 movq %rax,%r9 imull %ecx,%eax vpaddq %ymm11,%ymm8,%ymm8 vpmuludq %ymm12,%ymm10,%ymm10 andl $0x1fffffff,%eax vmovd %eax,%xmm12 vmovdqu 96-24-128(%r13),%ymm11 .byte 0x67 vpaddq %ymm10,%ymm9,%ymm9 vpbroadcastq %xmm12,%ymm12 vpmuludq 64-24-128(%r13),%ymm0,%ymm14 vmovdqu 128-24-128(%r13),%ymm10 movq %rax,%rdx imulq -128(%r13),%rax movq 8(%rsp),%r10 vpaddq %ymm14,%ymm2,%ymm1 vpmuludq %ymm0,%ymm11,%ymm11 vmovdqu 160-24-128(%r13),%ymm14 addq %rax,%r9 movq %rdx,%rax imulq 8-128(%r13),%rax .byte 0x67 shrq $29,%r9 movq 16(%rsp),%r11 vpaddq %ymm11,%ymm3,%ymm2 vpmuludq %ymm0,%ymm10,%ymm10 vmovdqu 192-24-128(%r13),%ymm11 addq %rax,%r10 movq %rdx,%rax imulq 16-128(%r13),%rax vpaddq %ymm10,%ymm4,%ymm3 vpmuludq %ymm0,%ymm14,%ymm14 vmovdqu 224-24-128(%r13),%ymm10 imulq 24-128(%r13),%rdx addq %rax,%r11 leaq (%r9,%r10,1),%rax vpaddq %ymm14,%ymm5,%ymm4 vpmuludq %ymm0,%ymm11,%ymm11 vmovdqu 256-24-128(%r13),%ymm14 movq %rax,%r10 imull %ecx,%eax vpmuludq %ymm0,%ymm10,%ymm10 vpaddq %ymm11,%ymm6,%ymm5 vmovdqu 288-24-128(%r13),%ymm11 andl $0x1fffffff,%eax vpaddq %ymm10,%ymm7,%ymm6 vpmuludq %ymm0,%ymm14,%ymm14 addq 24(%rsp),%rdx vpaddq %ymm14,%ymm8,%ymm7 vpmuludq %ymm0,%ymm11,%ymm11 vpaddq %ymm11,%ymm9,%ymm8 vmovq %r12,%xmm9 movq %rdx,%r12 decl %r14d jnz .LOOP_REDUCE_1024 leaq 448(%rsp),%r12 vpaddq %ymm9,%ymm13,%ymm0 vpxor %ymm9,%ymm9,%ymm9 vpaddq 288-192(%rbx),%ymm0,%ymm0 vpaddq 320-448(%r12),%ymm1,%ymm1 vpaddq 352-448(%r12),%ymm2,%ymm2 vpaddq 384-448(%r12),%ymm3,%ymm3 vpaddq 416-448(%r12),%ymm4,%ymm4 vpaddq 448-448(%r12),%ymm5,%ymm5 vpaddq 480-448(%r12),%ymm6,%ymm6 vpaddq 512-448(%r12),%ymm7,%ymm7 vpaddq 544-448(%r12),%ymm8,%ymm8 vpsrlq $29,%ymm0,%ymm14 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm12,%ymm12 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm0,%ymm0 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm2,%ymm2 vpblendd $3,%ymm13,%ymm9,%ymm13 vpaddq %ymm12,%ymm3,%ymm3 vpaddq %ymm13,%ymm4,%ymm4 vpsrlq $29,%ymm0,%ymm14 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm11 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm12,%ymm12 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm0,%ymm0 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm1,%ymm1 vmovdqu %ymm0,0-128(%rdi) vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm2,%ymm2 vmovdqu %ymm1,32-128(%rdi) vpblendd $3,%ymm13,%ymm9,%ymm13 vpaddq %ymm12,%ymm3,%ymm3 vmovdqu %ymm2,64-128(%rdi) vpaddq %ymm13,%ymm4,%ymm4 vmovdqu %ymm3,96-128(%rdi) vpsrlq $29,%ymm4,%ymm14 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm11 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm4,%ymm4 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm5,%ymm5 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm6,%ymm6 
vpblendd $3,%ymm13,%ymm0,%ymm13 vpaddq %ymm12,%ymm7,%ymm7 vpaddq %ymm13,%ymm8,%ymm8 vpsrlq $29,%ymm4,%ymm14 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm11 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm12 vpermq $0x93,%ymm14,%ymm14 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm13 vpermq $0x93,%ymm11,%ymm11 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm13,%ymm13 vpblendd $3,%ymm9,%ymm14,%ymm10 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm14,%ymm11,%ymm14 vpaddq %ymm10,%ymm4,%ymm4 vpblendd $3,%ymm11,%ymm12,%ymm11 vpaddq %ymm14,%ymm5,%ymm5 vmovdqu %ymm4,128-128(%rdi) vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm11,%ymm6,%ymm6 vmovdqu %ymm5,160-128(%rdi) vpblendd $3,%ymm13,%ymm0,%ymm13 vpaddq %ymm12,%ymm7,%ymm7 vmovdqu %ymm6,192-128(%rdi) vpaddq %ymm13,%ymm8,%ymm8 vmovdqu %ymm7,224-128(%rdi) vmovdqu %ymm8,256-128(%rdi) movq %rdi,%rsi decl %r8d jne .LOOP_GRANDE_SQR_1024 vzeroall movq %rbp,%rax .cfi_def_cfa_register %rax movq -48(%rax),%r15 .cfi_restore %r15 movq -40(%rax),%r14 .cfi_restore %r14 movq -32(%rax),%r13 .cfi_restore %r13 movq -24(%rax),%r12 .cfi_restore %r12 movq -16(%rax),%rbp .cfi_restore %rbp movq -8(%rax),%rbx .cfi_restore %rbx leaq (%rax),%rsp .cfi_def_cfa_register %rsp .Lsqr_1024_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_1024_sqr_avx2,.-rsaz_1024_sqr_avx2 .globl rsaz_1024_mul_avx2 .hidden rsaz_1024_mul_avx2 .type rsaz_1024_mul_avx2,@function .align 64 rsaz_1024_mul_avx2: .cfi_startproc _CET_ENDBR leaq (%rsp),%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 movq %rax,%rbp .cfi_def_cfa_register %rbp vzeroall movq %rdx,%r13 subq $64,%rsp .byte 0x67,0x67 movq %rsi,%r15 andq $4095,%r15 addq $320,%r15 shrq $12,%r15 movq %rsi,%r15 cmovnzq %r13,%rsi cmovnzq %r15,%r13 movq %rcx,%r15 subq $-128,%rsi subq $-128,%rcx subq $-128,%rdi andq $4095,%r15 addq $320,%r15 .byte 0x67,0x67 shrq $12,%r15 jz .Lmul_1024_no_n_copy subq $320,%rsp vmovdqu 0-128(%rcx),%ymm0 andq $-512,%rsp vmovdqu 32-128(%rcx),%ymm1 vmovdqu 64-128(%rcx),%ymm2 vmovdqu 96-128(%rcx),%ymm3 vmovdqu 128-128(%rcx),%ymm4 vmovdqu 160-128(%rcx),%ymm5 vmovdqu 192-128(%rcx),%ymm6 vmovdqu 224-128(%rcx),%ymm7 vmovdqu 256-128(%rcx),%ymm8 leaq 64+128(%rsp),%rcx vmovdqu %ymm0,0-128(%rcx) vpxor %ymm0,%ymm0,%ymm0 vmovdqu %ymm1,32-128(%rcx) vpxor %ymm1,%ymm1,%ymm1 vmovdqu %ymm2,64-128(%rcx) vpxor %ymm2,%ymm2,%ymm2 vmovdqu %ymm3,96-128(%rcx) vpxor %ymm3,%ymm3,%ymm3 vmovdqu %ymm4,128-128(%rcx) vpxor %ymm4,%ymm4,%ymm4 vmovdqu %ymm5,160-128(%rcx) vpxor %ymm5,%ymm5,%ymm5 vmovdqu %ymm6,192-128(%rcx) vpxor %ymm6,%ymm6,%ymm6 vmovdqu %ymm7,224-128(%rcx) vpxor %ymm7,%ymm7,%ymm7 vmovdqu %ymm8,256-128(%rcx) vmovdqa %ymm0,%ymm8 vmovdqu %ymm9,288-128(%rcx) .Lmul_1024_no_n_copy: andq $-64,%rsp movq (%r13),%rbx vpbroadcastq (%r13),%ymm10 vmovdqu %ymm0,(%rsp) xorq %r9,%r9 .byte 0x67 xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 vmovdqu .Land_mask(%rip),%ymm15 movl $9,%r14d vmovdqu %ymm9,288-128(%rdi) jmp .Loop_mul_1024 .align 32 .Loop_mul_1024: vpsrlq $29,%ymm3,%ymm9 movq %rbx,%rax imulq -128(%rsi),%rax addq %r9,%rax movq %rbx,%r10 imulq 8-128(%rsi),%r10 addq 8(%rsp),%r10 movq %rax,%r9 imull %r8d,%eax andl $0x1fffffff,%eax movq %rbx,%r11 imulq 16-128(%rsi),%r11 addq 16(%rsp),%r11 movq %rbx,%r12 imulq 24-128(%rsi),%r12 addq 24(%rsp),%r12 vpmuludq 32-128(%rsi),%ymm10,%ymm0 vmovd %eax,%xmm11 vpaddq %ymm0,%ymm1,%ymm1 
vpmuludq 64-128(%rsi),%ymm10,%ymm12 vpbroadcastq %xmm11,%ymm11 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq 96-128(%rsi),%ymm10,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq 128-128(%rsi),%ymm10,%ymm0 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq 160-128(%rsi),%ymm10,%ymm12 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq 192-128(%rsi),%ymm10,%ymm13 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq 224-128(%rsi),%ymm10,%ymm0 vpermq $0x93,%ymm9,%ymm9 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq 256-128(%rsi),%ymm10,%ymm12 vpbroadcastq 8(%r13),%ymm10 vpaddq %ymm12,%ymm8,%ymm8 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r9 movq %rdx,%rax imulq 8-128(%rcx),%rax addq %rax,%r10 movq %rdx,%rax imulq 16-128(%rcx),%rax addq %rax,%r11 shrq $29,%r9 imulq 24-128(%rcx),%rdx addq %rdx,%r12 addq %r9,%r10 vpmuludq 32-128(%rcx),%ymm11,%ymm13 vmovq %xmm10,%rbx vpaddq %ymm13,%ymm1,%ymm1 vpmuludq 64-128(%rcx),%ymm11,%ymm0 vpaddq %ymm0,%ymm2,%ymm2 vpmuludq 96-128(%rcx),%ymm11,%ymm12 vpaddq %ymm12,%ymm3,%ymm3 vpmuludq 128-128(%rcx),%ymm11,%ymm13 vpaddq %ymm13,%ymm4,%ymm4 vpmuludq 160-128(%rcx),%ymm11,%ymm0 vpaddq %ymm0,%ymm5,%ymm5 vpmuludq 192-128(%rcx),%ymm11,%ymm12 vpaddq %ymm12,%ymm6,%ymm6 vpmuludq 224-128(%rcx),%ymm11,%ymm13 vpblendd $3,%ymm14,%ymm9,%ymm12 vpaddq %ymm13,%ymm7,%ymm7 vpmuludq 256-128(%rcx),%ymm11,%ymm0 vpaddq %ymm12,%ymm3,%ymm3 vpaddq %ymm0,%ymm8,%ymm8 movq %rbx,%rax imulq -128(%rsi),%rax addq %rax,%r10 vmovdqu -8+32-128(%rsi),%ymm12 movq %rbx,%rax imulq 8-128(%rsi),%rax addq %rax,%r11 vmovdqu -8+64-128(%rsi),%ymm13 movq %r10,%rax vpblendd $0xfc,%ymm14,%ymm9,%ymm9 imull %r8d,%eax vpaddq %ymm9,%ymm4,%ymm4 andl $0x1fffffff,%eax imulq 16-128(%rsi),%rbx addq %rbx,%r12 vpmuludq %ymm10,%ymm12,%ymm12 vmovd %eax,%xmm11 vmovdqu -8+96-128(%rsi),%ymm0 vpaddq %ymm12,%ymm1,%ymm1 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq %xmm11,%ymm11 vmovdqu -8+128-128(%rsi),%ymm12 vpaddq %ymm13,%ymm2,%ymm2 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -8+160-128(%rsi),%ymm13 vpaddq %ymm0,%ymm3,%ymm3 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -8+192-128(%rsi),%ymm0 vpaddq %ymm12,%ymm4,%ymm4 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -8+224-128(%rsi),%ymm12 vpaddq %ymm13,%ymm5,%ymm5 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -8+256-128(%rsi),%ymm13 vpaddq %ymm0,%ymm6,%ymm6 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -8+288-128(%rsi),%ymm9 vpaddq %ymm12,%ymm7,%ymm7 vpmuludq %ymm10,%ymm13,%ymm13 vpaddq %ymm13,%ymm8,%ymm8 vpmuludq %ymm10,%ymm9,%ymm9 vpbroadcastq 16(%r13),%ymm10 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r10 vmovdqu -8+32-128(%rcx),%ymm0 movq %rdx,%rax imulq 8-128(%rcx),%rax addq %rax,%r11 vmovdqu -8+64-128(%rcx),%ymm12 shrq $29,%r10 imulq 16-128(%rcx),%rdx addq %rdx,%r12 addq %r10,%r11 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -8+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -8+128-128(%rcx),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -8+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -8+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -8+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -8+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -8+288-128(%rcx),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm11,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm11,%ymm13,%ymm13 vpaddq %ymm13,%ymm9,%ymm9 vmovdqu -16+32-128(%rsi),%ymm0 movq %rbx,%rax imulq -128(%rsi),%rax addq %r11,%rax vmovdqu -16+64-128(%rsi),%ymm12 movq %rax,%r11 imull %r8d,%eax andl $0x1fffffff,%eax 
imulq 8-128(%rsi),%rbx addq %rbx,%r12 vpmuludq %ymm10,%ymm0,%ymm0 vmovd %eax,%xmm11 vmovdqu -16+96-128(%rsi),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm10,%ymm12,%ymm12 vpbroadcastq %xmm11,%ymm11 vmovdqu -16+128-128(%rsi),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -16+160-128(%rsi),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -16+192-128(%rsi),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -16+224-128(%rsi),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -16+256-128(%rsi),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -16+288-128(%rsi),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm10,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq 24(%r13),%ymm10 vpaddq %ymm13,%ymm9,%ymm9 vmovdqu -16+32-128(%rcx),%ymm0 movq %rax,%rdx imulq -128(%rcx),%rax addq %rax,%r11 vmovdqu -16+64-128(%rcx),%ymm12 imulq 8-128(%rcx),%rdx addq %rdx,%r12 shrq $29,%r11 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -16+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -16+128-128(%rcx),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -16+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -16+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -16+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -16+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -16+288-128(%rcx),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -24+32-128(%rsi),%ymm0 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+64-128(%rsi),%ymm12 vpaddq %ymm13,%ymm9,%ymm9 addq %r11,%r12 imulq -128(%rsi),%rbx addq %rbx,%r12 movq %r12,%rax imull %r8d,%eax andl $0x1fffffff,%eax vpmuludq %ymm10,%ymm0,%ymm0 vmovd %eax,%xmm11 vmovdqu -24+96-128(%rsi),%ymm13 vpaddq %ymm0,%ymm1,%ymm1 vpmuludq %ymm10,%ymm12,%ymm12 vpbroadcastq %xmm11,%ymm11 vmovdqu -24+128-128(%rsi),%ymm0 vpaddq %ymm12,%ymm2,%ymm2 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -24+160-128(%rsi),%ymm12 vpaddq %ymm13,%ymm3,%ymm3 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -24+192-128(%rsi),%ymm13 vpaddq %ymm0,%ymm4,%ymm4 vpmuludq %ymm10,%ymm12,%ymm12 vmovdqu -24+224-128(%rsi),%ymm0 vpaddq %ymm12,%ymm5,%ymm5 vpmuludq %ymm10,%ymm13,%ymm13 vmovdqu -24+256-128(%rsi),%ymm12 vpaddq %ymm13,%ymm6,%ymm6 vpmuludq %ymm10,%ymm0,%ymm0 vmovdqu -24+288-128(%rsi),%ymm13 vpaddq %ymm0,%ymm7,%ymm7 vpmuludq %ymm10,%ymm12,%ymm12 vpaddq %ymm12,%ymm8,%ymm8 vpmuludq %ymm10,%ymm13,%ymm13 vpbroadcastq 32(%r13),%ymm10 vpaddq %ymm13,%ymm9,%ymm9 addq $32,%r13 vmovdqu -24+32-128(%rcx),%ymm0 imulq -128(%rcx),%rax addq %rax,%r12 shrq $29,%r12 vmovdqu -24+64-128(%rcx),%ymm12 vpmuludq %ymm11,%ymm0,%ymm0 vmovq %xmm10,%rbx vmovdqu -24+96-128(%rcx),%ymm13 vpaddq %ymm0,%ymm1,%ymm0 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu %ymm0,(%rsp) vpaddq %ymm12,%ymm2,%ymm1 vmovdqu -24+128-128(%rcx),%ymm0 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+160-128(%rcx),%ymm12 vpaddq %ymm13,%ymm3,%ymm2 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -24+192-128(%rcx),%ymm13 vpaddq %ymm0,%ymm4,%ymm3 vpmuludq %ymm11,%ymm12,%ymm12 vmovdqu -24+224-128(%rcx),%ymm0 vpaddq %ymm12,%ymm5,%ymm4 vpmuludq %ymm11,%ymm13,%ymm13 vmovdqu -24+256-128(%rcx),%ymm12 vpaddq %ymm13,%ymm6,%ymm5 vpmuludq %ymm11,%ymm0,%ymm0 vmovdqu -24+288-128(%rcx),%ymm13 movq %r12,%r9 vpaddq %ymm0,%ymm7,%ymm6 vpmuludq %ymm11,%ymm12,%ymm12 addq (%rsp),%r9 vpaddq 
%ymm12,%ymm8,%ymm7 vpmuludq %ymm11,%ymm13,%ymm13 vmovq %r12,%xmm12 vpaddq %ymm13,%ymm9,%ymm8 decl %r14d jnz .Loop_mul_1024 vpaddq (%rsp),%ymm12,%ymm0 vpsrlq $29,%ymm0,%ymm12 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm13 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm10,%ymm10 vpblendd $3,%ymm12,%ymm13,%ymm12 vpermq $0x93,%ymm11,%ymm11 vpaddq %ymm9,%ymm0,%ymm0 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm1,%ymm1 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm2,%ymm2 vpblendd $3,%ymm11,%ymm14,%ymm11 vpaddq %ymm10,%ymm3,%ymm3 vpaddq %ymm11,%ymm4,%ymm4 vpsrlq $29,%ymm0,%ymm12 vpand %ymm15,%ymm0,%ymm0 vpsrlq $29,%ymm1,%ymm13 vpand %ymm15,%ymm1,%ymm1 vpsrlq $29,%ymm2,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm2,%ymm2 vpsrlq $29,%ymm3,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm3,%ymm3 vpermq $0x93,%ymm10,%ymm10 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm0,%ymm0 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm1,%ymm1 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm2,%ymm2 vpblendd $3,%ymm11,%ymm14,%ymm11 vpaddq %ymm10,%ymm3,%ymm3 vpaddq %ymm11,%ymm4,%ymm4 vmovdqu %ymm0,0-128(%rdi) vmovdqu %ymm1,32-128(%rdi) vmovdqu %ymm2,64-128(%rdi) vmovdqu %ymm3,96-128(%rdi) vpsrlq $29,%ymm4,%ymm12 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm13 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm10,%ymm10 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm4,%ymm4 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm5,%ymm5 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm6,%ymm6 vpblendd $3,%ymm11,%ymm0,%ymm11 vpaddq %ymm10,%ymm7,%ymm7 vpaddq %ymm11,%ymm8,%ymm8 vpsrlq $29,%ymm4,%ymm12 vpand %ymm15,%ymm4,%ymm4 vpsrlq $29,%ymm5,%ymm13 vpand %ymm15,%ymm5,%ymm5 vpsrlq $29,%ymm6,%ymm10 vpermq $0x93,%ymm12,%ymm12 vpand %ymm15,%ymm6,%ymm6 vpsrlq $29,%ymm7,%ymm11 vpermq $0x93,%ymm13,%ymm13 vpand %ymm15,%ymm7,%ymm7 vpsrlq $29,%ymm8,%ymm0 vpermq $0x93,%ymm10,%ymm10 vpand %ymm15,%ymm8,%ymm8 vpermq $0x93,%ymm11,%ymm11 vpblendd $3,%ymm14,%ymm12,%ymm9 vpermq $0x93,%ymm0,%ymm0 vpblendd $3,%ymm12,%ymm13,%ymm12 vpaddq %ymm9,%ymm4,%ymm4 vpblendd $3,%ymm13,%ymm10,%ymm13 vpaddq %ymm12,%ymm5,%ymm5 vpblendd $3,%ymm10,%ymm11,%ymm10 vpaddq %ymm13,%ymm6,%ymm6 vpblendd $3,%ymm11,%ymm0,%ymm11 vpaddq %ymm10,%ymm7,%ymm7 vpaddq %ymm11,%ymm8,%ymm8 vmovdqu %ymm4,128-128(%rdi) vmovdqu %ymm5,160-128(%rdi) vmovdqu %ymm6,192-128(%rdi) vmovdqu %ymm7,224-128(%rdi) vmovdqu %ymm8,256-128(%rdi) vzeroupper movq %rbp,%rax .cfi_def_cfa_register %rax movq -48(%rax),%r15 .cfi_restore %r15 movq -40(%rax),%r14 .cfi_restore %r14 movq -32(%rax),%r13 .cfi_restore %r13 movq -24(%rax),%r12 .cfi_restore %r12 movq -16(%rax),%rbp .cfi_restore %rbp movq -8(%rax),%rbx .cfi_restore %rbx leaq (%rax),%rsp .cfi_def_cfa_register %rsp .Lmul_1024_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_1024_mul_avx2,.-rsaz_1024_mul_avx2 .globl rsaz_1024_red2norm_avx2 .hidden rsaz_1024_red2norm_avx2 .type rsaz_1024_red2norm_avx2,@function .align 32 rsaz_1024_red2norm_avx2: .cfi_startproc _CET_ENDBR subq $-128,%rsi xorq %rax,%rax movq 
-128(%rsi),%r8 movq -120(%rsi),%r9 movq -112(%rsi),%r10 shlq $0,%r8 shlq $29,%r9 movq %r10,%r11 shlq $58,%r10 shrq $6,%r11 addq %r8,%rax addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,0(%rdi) movq %r11,%rax movq -104(%rsi),%r8 movq -96(%rsi),%r9 shlq $23,%r8 movq %r9,%r10 shlq $52,%r9 shrq $12,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,8(%rdi) movq %r10,%rax movq -88(%rsi),%r11 movq -80(%rsi),%r8 shlq $17,%r11 movq %r8,%r9 shlq $46,%r8 shrq $18,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,16(%rdi) movq %r9,%rax movq -72(%rsi),%r10 movq -64(%rsi),%r11 shlq $11,%r10 movq %r11,%r8 shlq $40,%r11 shrq $24,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,24(%rdi) movq %r8,%rax movq -56(%rsi),%r9 movq -48(%rsi),%r10 movq -40(%rsi),%r11 shlq $5,%r9 shlq $34,%r10 movq %r11,%r8 shlq $63,%r11 shrq $1,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,32(%rdi) movq %r8,%rax movq -32(%rsi),%r9 movq -24(%rsi),%r10 shlq $28,%r9 movq %r10,%r11 shlq $57,%r10 shrq $7,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,40(%rdi) movq %r11,%rax movq -16(%rsi),%r8 movq -8(%rsi),%r9 shlq $22,%r8 movq %r9,%r10 shlq $51,%r9 shrq $13,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,48(%rdi) movq %r10,%rax movq 0(%rsi),%r11 movq 8(%rsi),%r8 shlq $16,%r11 movq %r8,%r9 shlq $45,%r8 shrq $19,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,56(%rdi) movq %r9,%rax movq 16(%rsi),%r10 movq 24(%rsi),%r11 shlq $10,%r10 movq %r11,%r8 shlq $39,%r11 shrq $25,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,64(%rdi) movq %r8,%rax movq 32(%rsi),%r9 movq 40(%rsi),%r10 movq 48(%rsi),%r11 shlq $4,%r9 shlq $33,%r10 movq %r11,%r8 shlq $62,%r11 shrq $2,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,72(%rdi) movq %r8,%rax movq 56(%rsi),%r9 movq 64(%rsi),%r10 shlq $27,%r9 movq %r10,%r11 shlq $56,%r10 shrq $8,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,80(%rdi) movq %r11,%rax movq 72(%rsi),%r8 movq 80(%rsi),%r9 shlq $21,%r8 movq %r9,%r10 shlq $50,%r9 shrq $14,%r10 addq %r8,%rax addq %r9,%rax adcq $0,%r10 movq %rax,88(%rdi) movq %r10,%rax movq 88(%rsi),%r11 movq 96(%rsi),%r8 shlq $15,%r11 movq %r8,%r9 shlq $44,%r8 shrq $20,%r9 addq %r11,%rax addq %r8,%rax adcq $0,%r9 movq %rax,96(%rdi) movq %r9,%rax movq 104(%rsi),%r10 movq 112(%rsi),%r11 shlq $9,%r10 movq %r11,%r8 shlq $38,%r11 shrq $26,%r8 addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,104(%rdi) movq %r8,%rax movq 120(%rsi),%r9 movq 128(%rsi),%r10 movq 136(%rsi),%r11 shlq $3,%r9 shlq $32,%r10 movq %r11,%r8 shlq $61,%r11 shrq $3,%r8 addq %r9,%rax addq %r10,%rax addq %r11,%rax adcq $0,%r8 movq %rax,112(%rdi) movq %r8,%rax movq 144(%rsi),%r9 movq 152(%rsi),%r10 shlq $26,%r9 movq %r10,%r11 shlq $55,%r10 shrq $9,%r11 addq %r9,%rax addq %r10,%rax adcq $0,%r11 movq %rax,120(%rdi) movq %r11,%rax .byte 0xf3,0xc3 .cfi_endproc .size rsaz_1024_red2norm_avx2,.-rsaz_1024_red2norm_avx2 .globl rsaz_1024_norm2red_avx2 .hidden rsaz_1024_norm2red_avx2 .type rsaz_1024_norm2red_avx2,@function .align 32 rsaz_1024_norm2red_avx2: .cfi_startproc _CET_ENDBR subq $-128,%rdi movq (%rsi),%r8 movl $0x1fffffff,%eax movq 8(%rsi),%r9 movq %r8,%r11 shrq $0,%r11 andq %rax,%r11 movq %r11,-128(%rdi) movq %r8,%r10 shrq $29,%r10 andq %rax,%r10 movq %r10,-120(%rdi) shrdq $58,%r9,%r8 andq %rax,%r8 movq %r8,-112(%rdi) movq 16(%rsi),%r10 movq %r9,%r8 shrq $23,%r8 andq %rax,%r8 movq %r8,-104(%rdi) shrdq $52,%r10,%r9 andq %rax,%r9 movq %r9,-96(%rdi) movq 24(%rsi),%r11 movq %r10,%r9 shrq $17,%r9 andq %rax,%r9 movq 
%r9,-88(%rdi) shrdq $46,%r11,%r10 andq %rax,%r10 movq %r10,-80(%rdi) movq 32(%rsi),%r8 movq %r11,%r10 shrq $11,%r10 andq %rax,%r10 movq %r10,-72(%rdi) shrdq $40,%r8,%r11 andq %rax,%r11 movq %r11,-64(%rdi) movq 40(%rsi),%r9 movq %r8,%r11 shrq $5,%r11 andq %rax,%r11 movq %r11,-56(%rdi) movq %r8,%r10 shrq $34,%r10 andq %rax,%r10 movq %r10,-48(%rdi) shrdq $63,%r9,%r8 andq %rax,%r8 movq %r8,-40(%rdi) movq 48(%rsi),%r10 movq %r9,%r8 shrq $28,%r8 andq %rax,%r8 movq %r8,-32(%rdi) shrdq $57,%r10,%r9 andq %rax,%r9 movq %r9,-24(%rdi) movq 56(%rsi),%r11 movq %r10,%r9 shrq $22,%r9 andq %rax,%r9 movq %r9,-16(%rdi) shrdq $51,%r11,%r10 andq %rax,%r10 movq %r10,-8(%rdi) movq 64(%rsi),%r8 movq %r11,%r10 shrq $16,%r10 andq %rax,%r10 movq %r10,0(%rdi) shrdq $45,%r8,%r11 andq %rax,%r11 movq %r11,8(%rdi) movq 72(%rsi),%r9 movq %r8,%r11 shrq $10,%r11 andq %rax,%r11 movq %r11,16(%rdi) shrdq $39,%r9,%r8 andq %rax,%r8 movq %r8,24(%rdi) movq 80(%rsi),%r10 movq %r9,%r8 shrq $4,%r8 andq %rax,%r8 movq %r8,32(%rdi) movq %r9,%r11 shrq $33,%r11 andq %rax,%r11 movq %r11,40(%rdi) shrdq $62,%r10,%r9 andq %rax,%r9 movq %r9,48(%rdi) movq 88(%rsi),%r11 movq %r10,%r9 shrq $27,%r9 andq %rax,%r9 movq %r9,56(%rdi) shrdq $56,%r11,%r10 andq %rax,%r10 movq %r10,64(%rdi) movq 96(%rsi),%r8 movq %r11,%r10 shrq $21,%r10 andq %rax,%r10 movq %r10,72(%rdi) shrdq $50,%r8,%r11 andq %rax,%r11 movq %r11,80(%rdi) movq 104(%rsi),%r9 movq %r8,%r11 shrq $15,%r11 andq %rax,%r11 movq %r11,88(%rdi) shrdq $44,%r9,%r8 andq %rax,%r8 movq %r8,96(%rdi) movq 112(%rsi),%r10 movq %r9,%r8 shrq $9,%r8 andq %rax,%r8 movq %r8,104(%rdi) shrdq $38,%r10,%r9 andq %rax,%r9 movq %r9,112(%rdi) movq 120(%rsi),%r11 movq %r10,%r9 shrq $3,%r9 andq %rax,%r9 movq %r9,120(%rdi) movq %r10,%r8 shrq $32,%r8 andq %rax,%r8 movq %r8,128(%rdi) shrdq $61,%r11,%r10 andq %rax,%r10 movq %r10,136(%rdi) xorq %r8,%r8 movq %r11,%r10 shrq $26,%r10 andq %rax,%r10 movq %r10,144(%rdi) shrdq $55,%r8,%r11 andq %rax,%r11 movq %r11,152(%rdi) movq %r8,160(%rdi) movq %r8,168(%rdi) movq %r8,176(%rdi) movq %r8,184(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size rsaz_1024_norm2red_avx2,.-rsaz_1024_norm2red_avx2 .globl rsaz_1024_scatter5_avx2 .hidden rsaz_1024_scatter5_avx2 .type rsaz_1024_scatter5_avx2,@function .align 32 rsaz_1024_scatter5_avx2: .cfi_startproc _CET_ENDBR vzeroupper vmovdqu .Lscatter_permd(%rip),%ymm5 shll $4,%edx leaq (%rdi,%rdx,1),%rdi movl $9,%eax jmp .Loop_scatter_1024 .align 32 .Loop_scatter_1024: vmovdqu (%rsi),%ymm0 leaq 32(%rsi),%rsi vpermd %ymm0,%ymm5,%ymm0 vmovdqu %xmm0,(%rdi) leaq 512(%rdi),%rdi decl %eax jnz .Loop_scatter_1024 vzeroupper .byte 0xf3,0xc3 .cfi_endproc .size rsaz_1024_scatter5_avx2,.-rsaz_1024_scatter5_avx2 .globl rsaz_1024_gather5_avx2 .hidden rsaz_1024_gather5_avx2 .type rsaz_1024_gather5_avx2,@function .align 32 rsaz_1024_gather5_avx2: .cfi_startproc _CET_ENDBR vzeroupper movq %rsp,%r11 .cfi_def_cfa_register %r11 leaq -256(%rsp),%rsp andq $-32,%rsp leaq .Linc(%rip),%r10 leaq -128(%rsp),%rax vmovd %edx,%xmm4 vmovdqa (%r10),%ymm0 vmovdqa 32(%r10),%ymm1 vmovdqa 64(%r10),%ymm5 vpbroadcastd %xmm4,%ymm4 vpaddd %ymm5,%ymm0,%ymm2 vpcmpeqd %ymm4,%ymm0,%ymm0 vpaddd %ymm5,%ymm1,%ymm3 vpcmpeqd %ymm4,%ymm1,%ymm1 vmovdqa %ymm0,0+128(%rax) vpaddd %ymm5,%ymm2,%ymm0 vpcmpeqd %ymm4,%ymm2,%ymm2 vmovdqa %ymm1,32+128(%rax) vpaddd %ymm5,%ymm3,%ymm1 vpcmpeqd %ymm4,%ymm3,%ymm3 vmovdqa %ymm2,64+128(%rax) vpaddd %ymm5,%ymm0,%ymm2 vpcmpeqd %ymm4,%ymm0,%ymm0 vmovdqa %ymm3,96+128(%rax) vpaddd %ymm5,%ymm1,%ymm3 vpcmpeqd %ymm4,%ymm1,%ymm1 vmovdqa %ymm0,128+128(%rax) vpaddd %ymm5,%ymm2,%ymm8 
vpcmpeqd %ymm4,%ymm2,%ymm2 vmovdqa %ymm1,160+128(%rax) vpaddd %ymm5,%ymm3,%ymm9 vpcmpeqd %ymm4,%ymm3,%ymm3 vmovdqa %ymm2,192+128(%rax) vpaddd %ymm5,%ymm8,%ymm10 vpcmpeqd %ymm4,%ymm8,%ymm8 vmovdqa %ymm3,224+128(%rax) vpaddd %ymm5,%ymm9,%ymm11 vpcmpeqd %ymm4,%ymm9,%ymm9 vpaddd %ymm5,%ymm10,%ymm12 vpcmpeqd %ymm4,%ymm10,%ymm10 vpaddd %ymm5,%ymm11,%ymm13 vpcmpeqd %ymm4,%ymm11,%ymm11 vpaddd %ymm5,%ymm12,%ymm14 vpcmpeqd %ymm4,%ymm12,%ymm12 vpaddd %ymm5,%ymm13,%ymm15 vpcmpeqd %ymm4,%ymm13,%ymm13 vpcmpeqd %ymm4,%ymm14,%ymm14 vpcmpeqd %ymm4,%ymm15,%ymm15 vmovdqa -32(%r10),%ymm7 leaq 128(%rsi),%rsi movl $9,%edx .Loop_gather_1024: vmovdqa 0-128(%rsi),%ymm0 vmovdqa 32-128(%rsi),%ymm1 vmovdqa 64-128(%rsi),%ymm2 vmovdqa 96-128(%rsi),%ymm3 vpand 0+128(%rax),%ymm0,%ymm0 vpand 32+128(%rax),%ymm1,%ymm1 vpand 64+128(%rax),%ymm2,%ymm2 vpor %ymm0,%ymm1,%ymm4 vpand 96+128(%rax),%ymm3,%ymm3 vmovdqa 128-128(%rsi),%ymm0 vmovdqa 160-128(%rsi),%ymm1 vpor %ymm2,%ymm3,%ymm5 vmovdqa 192-128(%rsi),%ymm2 vmovdqa 224-128(%rsi),%ymm3 vpand 128+128(%rax),%ymm0,%ymm0 vpand 160+128(%rax),%ymm1,%ymm1 vpand 192+128(%rax),%ymm2,%ymm2 vpor %ymm0,%ymm4,%ymm4 vpand 224+128(%rax),%ymm3,%ymm3 vpand 256-128(%rsi),%ymm8,%ymm0 vpor %ymm1,%ymm5,%ymm5 vpand 288-128(%rsi),%ymm9,%ymm1 vpor %ymm2,%ymm4,%ymm4 vpand 320-128(%rsi),%ymm10,%ymm2 vpor %ymm3,%ymm5,%ymm5 vpand 352-128(%rsi),%ymm11,%ymm3 vpor %ymm0,%ymm4,%ymm4 vpand 384-128(%rsi),%ymm12,%ymm0 vpor %ymm1,%ymm5,%ymm5 vpand 416-128(%rsi),%ymm13,%ymm1 vpor %ymm2,%ymm4,%ymm4 vpand 448-128(%rsi),%ymm14,%ymm2 vpor %ymm3,%ymm5,%ymm5 vpand 480-128(%rsi),%ymm15,%ymm3 leaq 512(%rsi),%rsi vpor %ymm0,%ymm4,%ymm4 vpor %ymm1,%ymm5,%ymm5 vpor %ymm2,%ymm4,%ymm4 vpor %ymm3,%ymm5,%ymm5 vpor %ymm5,%ymm4,%ymm4 vextracti128 $1,%ymm4,%xmm5 vpor %xmm4,%xmm5,%xmm5 vpermd %ymm5,%ymm7,%ymm5 vmovdqu %ymm5,(%rdi) leaq 32(%rdi),%rdi decl %edx jnz .Loop_gather_1024 vpxor %ymm0,%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) vzeroupper leaq (%r11),%rsp .cfi_def_cfa_register %rsp .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_rsaz_1024_gather5: .size rsaz_1024_gather5_avx2,.-rsaz_1024_gather5_avx2 .section .rodata .align 64 .Land_mask: .quad 0x1fffffff,0x1fffffff,0x1fffffff,0x1fffffff .Lscatter_permd: .long 0,2,4,6,7,7,7,7 .Lgather_permd: .long 0,7,1,7,2,7,3,7 .Linc: .long 0,0,0,0, 1,1,1,1 .long 2,2,2,2, 3,3,3,3 .long 4,4,4,4, 4,4,4,4 .align 64 .text #endif
marvin-hansen/iggy-streaming-system
31,760
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/rsaz-4k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl rsaz_amm52x40_x1_ifma256 .hidden rsaz_amm52x40_x1_ifma256 .type rsaz_amm52x40_x1_ifma256,@function .align 32 rsaz_amm52x40_x1_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $10,%ebx .align 32 .Lloop10: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 
128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 24(%r11),%r13 vpbroadcastq 
%r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 leaq 32(%r11),%r11 decl %ebx jne .Lloop10 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm23 vpsrlq $52,%ymm7,%ymm24 vpsrlq $52,%ymm8,%ymm25 vpsrlq $52,%ymm9,%ymm26 vpsrlq $52,%ymm10,%ymm27 vpsrlq $52,%ymm11,%ymm28 vpsrlq $52,%ymm12,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm23,%ymm6,%ymm6 vpaddq %ymm24,%ymm7,%ymm7 vpaddq %ymm25,%ymm8,%ymm8 vpaddq %ymm26,%ymm9,%ymm9 vpaddq %ymm27,%ymm10,%ymm10 vpaddq %ymm28,%ymm11,%ymm11 vpaddq %ymm29,%ymm12,%ymm12 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq 
$6,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb %bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq .Lmask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq .Lmask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq .Lmask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq .Lmask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq .Lmask52x4(%rip),%ymm9,%ymm9{%k7} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq .Lmask52x4(%rip),%ymm10,%ymm10{%k1} vpsubq .Lmask52x4(%rip),%ymm11,%ymm11{%k2} vpsubq .Lmask52x4(%rip),%ymm12,%ymm12{%k3} vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) vzeroupper leaq (%rsp),%rax .cfi_def_cfa_register %rax movq 0(%rax),%r15 .cfi_restore %r15 movq 8(%rax),%r14 .cfi_restore %r14 movq 16(%rax),%r13 .cfi_restore %r13 movq 24(%rax),%r12 .cfi_restore %r12 movq 32(%rax),%rbp .cfi_restore %rbp movq 40(%rax),%rbx .cfi_restore %rbx leaq 48(%rax),%rsp .cfi_def_cfa %rsp,8 .Lrsaz_amm52x40_x1_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x40_x1_ifma256, .-rsaz_amm52x40_x1_ifma256 .section .rodata .align 32 .Lmask52x4: .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl rsaz_amm52x40_x2_ifma256 .hidden rsaz_amm52x40_x2_ifma256 .type rsaz_amm52x40_x2_ifma256,@function .align 32 rsaz_amm52x40_x2_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 
.cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 vmovdqa64 %ymm0,%ymm13 vmovdqa64 %ymm0,%ymm14 vmovdqa64 %ymm0,%ymm15 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $40,%ebx .align 32 .Lloop40: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm11,%ymm10 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm0,%ymm12 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 movq 320(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 320(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 320(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq %r10,%r15 vpmadd52luq 320(%rsi),%ymm1,%ymm13 vpmadd52luq 352(%rsi),%ymm1,%ymm14 vpmadd52luq 384(%rsi),%ymm1,%ymm15 vpmadd52luq 416(%rsi),%ymm1,%ymm16 vpmadd52luq 448(%rsi),%ymm1,%ymm17 vpmadd52luq 480(%rsi),%ymm1,%ymm18 vpmadd52luq 512(%rsi),%ymm1,%ymm19 vpmadd52luq 544(%rsi),%ymm1,%ymm20 vpmadd52luq 576(%rsi),%ymm1,%ymm21 vpmadd52luq 608(%rsi),%ymm1,%ymm22 vpmadd52luq 320(%rcx),%ymm2,%ymm13 vpmadd52luq 352(%rcx),%ymm2,%ymm14 vpmadd52luq 384(%rcx),%ymm2,%ymm15 vpmadd52luq 416(%rcx),%ymm2,%ymm16 vpmadd52luq 448(%rcx),%ymm2,%ymm17 vpmadd52luq 480(%rcx),%ymm2,%ymm18 vpmadd52luq 512(%rcx),%ymm2,%ymm19 
vpmadd52luq 544(%rcx),%ymm2,%ymm20 vpmadd52luq 576(%rcx),%ymm2,%ymm21 vpmadd52luq 608(%rcx),%ymm2,%ymm22 valignq $1,%ymm13,%ymm14,%ymm13 valignq $1,%ymm14,%ymm15,%ymm14 valignq $1,%ymm15,%ymm16,%ymm15 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm20,%ymm19 valignq $1,%ymm20,%ymm21,%ymm20 valignq $1,%ymm21,%ymm22,%ymm21 valignq $1,%ymm22,%ymm0,%ymm22 vmovq %xmm13,%r13 addq %r13,%r15 vpmadd52huq 320(%rsi),%ymm1,%ymm13 vpmadd52huq 352(%rsi),%ymm1,%ymm14 vpmadd52huq 384(%rsi),%ymm1,%ymm15 vpmadd52huq 416(%rsi),%ymm1,%ymm16 vpmadd52huq 448(%rsi),%ymm1,%ymm17 vpmadd52huq 480(%rsi),%ymm1,%ymm18 vpmadd52huq 512(%rsi),%ymm1,%ymm19 vpmadd52huq 544(%rsi),%ymm1,%ymm20 vpmadd52huq 576(%rsi),%ymm1,%ymm21 vpmadd52huq 608(%rsi),%ymm1,%ymm22 vpmadd52huq 320(%rcx),%ymm2,%ymm13 vpmadd52huq 352(%rcx),%ymm2,%ymm14 vpmadd52huq 384(%rcx),%ymm2,%ymm15 vpmadd52huq 416(%rcx),%ymm2,%ymm16 vpmadd52huq 448(%rcx),%ymm2,%ymm17 vpmadd52huq 480(%rcx),%ymm2,%ymm18 vpmadd52huq 512(%rcx),%ymm2,%ymm19 vpmadd52huq 544(%rcx),%ymm2,%ymm20 vpmadd52huq 576(%rcx),%ymm2,%ymm21 vpmadd52huq 608(%rcx),%ymm2,%ymm22 leaq 8(%r11),%r11 decl %ebx jne .Lloop40 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm23 vpsrlq $52,%ymm7,%ymm24 vpsrlq $52,%ymm8,%ymm25 vpsrlq $52,%ymm9,%ymm26 vpsrlq $52,%ymm10,%ymm27 vpsrlq $52,%ymm11,%ymm28 vpsrlq $52,%ymm12,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm23,%ymm6,%ymm6 vpaddq %ymm24,%ymm7,%ymm7 vpaddq %ymm25,%ymm8,%ymm8 vpaddq %ymm26,%ymm9,%ymm9 vpaddq %ymm27,%ymm10,%ymm10 vpaddq %ymm28,%ymm11,%ymm11 vpaddq %ymm29,%ymm12,%ymm12 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm8,%k2 
kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb %bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq .Lmask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq .Lmask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq .Lmask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq .Lmask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq .Lmask52x4(%rip),%ymm9,%ymm9{%k7} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq .Lmask52x4(%rip),%ymm10,%ymm10{%k1} vpsubq .Lmask52x4(%rip),%ymm11,%ymm11{%k2} vpsubq .Lmask52x4(%rip),%ymm12,%ymm12{%k3} vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm13,%ymm13 vpsrlq $52,%ymm13,%ymm0 vpsrlq $52,%ymm14,%ymm1 vpsrlq $52,%ymm15,%ymm2 vpsrlq $52,%ymm16,%ymm23 vpsrlq $52,%ymm17,%ymm24 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 vpsrlq $52,%ymm20,%ymm27 vpsrlq $52,%ymm21,%ymm28 vpsrlq $52,%ymm22,%ymm29 valignq $3,%ymm28,%ymm29,%ymm29 valignq $3,%ymm27,%ymm28,%ymm28 valignq $3,%ymm26,%ymm27,%ymm27 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm24,%ymm25,%ymm25 valignq $3,%ymm23,%ymm24,%ymm24 valignq $3,%ymm2,%ymm23,%ymm23 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm13,%ymm13 vpandq .Lmask52x4(%rip),%ymm14,%ymm14 vpandq .Lmask52x4(%rip),%ymm15,%ymm15 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 vpandq .Lmask52x4(%rip),%ymm20,%ymm20 vpandq .Lmask52x4(%rip),%ymm21,%ymm21 vpandq .Lmask52x4(%rip),%ymm22,%ymm22 vpaddq %ymm0,%ymm13,%ymm13 vpaddq %ymm1,%ymm14,%ymm14 vpaddq %ymm2,%ymm15,%ymm15 vpaddq %ymm23,%ymm16,%ymm16 vpaddq %ymm24,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpaddq %ymm27,%ymm20,%ymm20 vpaddq %ymm28,%ymm21,%ymm21 vpaddq %ymm29,%ymm22,%ymm22 vpcmpuq $6,.Lmask52x4(%rip),%ymm13,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm14,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm15,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm16,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm17,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm18,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm19,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm20,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b vpcmpuq $6,.Lmask52x4(%rip),%ymm21,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm22,%k2 kmovb %k1,%r10d kmovb %k2,%r9d shlb $4,%r9b orb %r9b,%r10b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b adcb %r10b,%r10b vpcmpuq 
$0,.Lmask52x4(%rip),%ymm13,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm14,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm15,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm16,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm17,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm18,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm19,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm20,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl vpcmpuq $0,.Lmask52x4(%rip),%ymm21,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm22,%k2 kmovb %k1,%ebx kmovb %k2,%eax shlb $4,%al orb %al,%bl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b adcb %bl,%r10b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b xorb %bl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm13,%ymm13{%k1} vpsubq .Lmask52x4(%rip),%ymm14,%ymm14{%k2} vpsubq .Lmask52x4(%rip),%ymm15,%ymm15{%k3} vpsubq .Lmask52x4(%rip),%ymm16,%ymm16{%k4} vpsubq .Lmask52x4(%rip),%ymm17,%ymm17{%k5} vpsubq .Lmask52x4(%rip),%ymm18,%ymm18{%k6} vpsubq .Lmask52x4(%rip),%ymm19,%ymm19{%k7} vpandq .Lmask52x4(%rip),%ymm13,%ymm13 vpandq .Lmask52x4(%rip),%ymm14,%ymm14 vpandq .Lmask52x4(%rip),%ymm15,%ymm15 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 shrb $4,%r11b kmovb %r11d,%k1 kmovb %r10d,%k2 shrb $4,%r10b kmovb %r10d,%k3 vpsubq .Lmask52x4(%rip),%ymm20,%ymm20{%k1} vpsubq .Lmask52x4(%rip),%ymm21,%ymm21{%k2} vpsubq .Lmask52x4(%rip),%ymm22,%ymm22{%k3} vpandq .Lmask52x4(%rip),%ymm20,%ymm20 vpandq .Lmask52x4(%rip),%ymm21,%ymm21 vpandq .Lmask52x4(%rip),%ymm22,%ymm22 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) vmovdqu64 %ymm13,320(%rdi) vmovdqu64 %ymm14,352(%rdi) vmovdqu64 %ymm15,384(%rdi) vmovdqu64 %ymm16,416(%rdi) vmovdqu64 %ymm17,448(%rdi) vmovdqu64 %ymm18,480(%rdi) vmovdqu64 %ymm19,512(%rdi) vmovdqu64 %ymm20,544(%rdi) vmovdqu64 %ymm21,576(%rdi) vmovdqu64 %ymm22,608(%rdi) vzeroupper leaq (%rsp),%rax .cfi_def_cfa_register %rax movq 0(%rax),%r15 .cfi_restore %r15 movq 8(%rax),%r14 .cfi_restore %r14 movq 16(%rax),%r13 .cfi_restore %r13 movq 24(%rax),%r12 .cfi_restore %r12 movq 32(%rax),%rbp .cfi_restore %rbp movq 40(%rax),%rbx .cfi_restore %rbx leaq 48(%rax),%rsp .cfi_def_cfa %rsp,8 .Lrsaz_amm52x40_x2_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x40_x2_ifma256, .-rsaz_amm52x40_x2_ifma256 .text .align 32 .globl extract_multiplier_2x40_win5 .hidden extract_multiplier_2x40_win5 .type extract_multiplier_2x40_win5,@function extract_multiplier_2x40_win5: .cfi_startproc .byte 243,15,30,250 vmovdqa64 .Lones(%rip),%ymm24 vpbroadcastq %rdx,%ymm22 vpbroadcastq %rcx,%ymm23 leaq 20480(%rsi),%rax movq %rsi,%r10 vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vpxorq %ymm21,%ymm21,%ymm21 .align 32 .Lloop_0: vpcmpq $0,%ymm21,%ymm22,%k1 vmovdqu64 0(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} 
vmovdqu64 64(%rsi),%ymm20 vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k1} vmovdqu64 192(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k1} vmovdqu64 224(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k1} vmovdqu64 256(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k1} vmovdqu64 288(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k1} vpaddq %ymm24,%ymm21,%ymm21 addq $640,%rsi cmpq %rsi,%rax jne .Lloop_0 vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 %ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) movq %r10,%rsi vpxorq %ymm21,%ymm21,%ymm21 .align 32 .Lloop_320: vpcmpq $0,%ymm21,%ymm23,%k1 vmovdqu64 320(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 352(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} vmovdqu64 384(%rsi),%ymm20 vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 416(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 448(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 480(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k1} vmovdqu64 512(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k1} vmovdqu64 544(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k1} vmovdqu64 576(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k1} vmovdqu64 608(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k1} vpaddq %ymm24,%ymm21,%ymm21 addq $640,%rsi cmpq %rsi,%rax jne .Lloop_320 vmovdqu64 %ymm0,320(%rdi) vmovdqu64 %ymm1,352(%rdi) vmovdqu64 %ymm2,384(%rdi) vmovdqu64 %ymm3,416(%rdi) vmovdqu64 %ymm4,448(%rdi) vmovdqu64 %ymm5,480(%rdi) vmovdqu64 %ymm16,512(%rdi) vmovdqu64 %ymm17,544(%rdi) vmovdqu64 %ymm18,576(%rdi) vmovdqu64 %ymm19,608(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size extract_multiplier_2x40_win5, .-extract_multiplier_2x40_win5 .section .rodata .align 32 .Lones: .quad 1,1,1,1 .Lzeros: .quad 0,0,0,0 .text #endif #endif
marvin-hansen/iggy-streaming-system
29,950
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/rsaz-3k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl rsaz_amm52x30_x1_ifma256 .hidden rsaz_amm52x30_x1_ifma256 .type rsaz_amm52x30_x1_ifma256,@function .align 32 rsaz_amm52x30_x1_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $7,%ebx .align 32 .Lloop7: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 
valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 24(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 
0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 leaq 32(%r11),%r11 decl %ebx jne .Lloop7 movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 
224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm19 vpsrlq $52,%ymm7,%ymm20 vpsrlq $52,%ymm8,%ymm21 vpsrlq $52,%ymm9,%ymm22 vpsrlq $52,%ymm10,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm19,%ymm6,%ymm6 vpaddq %ymm20,%ymm7,%ymm7 vpaddq %ymm21,%ymm8,%ymm8 vpaddq %ymm22,%ymm9,%ymm9 vpaddq %ymm23,%ymm10,%ymm10 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq .Lmask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq .Lmask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq .Lmask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq .Lmask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq .Lmask52x4(%rip),%ymm9,%ymm9{%k7} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 vpsubq .Lmask52x4(%rip),%ymm10,%ymm10{%k1} vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 
%ymm10,224(%rdi) vzeroupper leaq (%rsp),%rax .cfi_def_cfa_register %rax movq 0(%rax),%r15 .cfi_restore %r15 movq 8(%rax),%r14 .cfi_restore %r14 movq 16(%rax),%r13 .cfi_restore %r13 movq 24(%rax),%r12 .cfi_restore %r12 movq 32(%rax),%rbp .cfi_restore %rbp movq 40(%rax),%rbx .cfi_restore %rbx leaq 48(%rax),%rsp .cfi_def_cfa %rsp,8 .Lrsaz_amm52x30_x1_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x30_x1_ifma256, .-rsaz_amm52x30_x1_ifma256 .section .rodata .align 32 .Lmask52x4: .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl rsaz_amm52x30_x2_ifma256 .hidden rsaz_amm52x30_x2_ifma256 .type rsaz_amm52x30_x2_ifma256,@function .align 32 rsaz_amm52x30_x2_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm6 vmovdqa64 %ymm0,%ymm7 vmovdqa64 %ymm0,%ymm8 vmovdqa64 %ymm0,%ymm9 vmovdqa64 %ymm0,%ymm10 vmovdqa64 %ymm0,%ymm11 vmovdqa64 %ymm0,%ymm12 vmovdqa64 %ymm0,%ymm13 vmovdqa64 %ymm0,%ymm14 vmovdqa64 %ymm0,%ymm15 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $30,%ebx .align 32 .Lloop30: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm4 vpmadd52luq 64(%rsi),%ymm1,%ymm5 vpmadd52luq 96(%rsi),%ymm1,%ymm6 vpmadd52luq 128(%rsi),%ymm1,%ymm7 vpmadd52luq 160(%rsi),%ymm1,%ymm8 vpmadd52luq 192(%rsi),%ymm1,%ymm9 vpmadd52luq 224(%rsi),%ymm1,%ymm10 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm4 vpmadd52luq 64(%rcx),%ymm2,%ymm5 vpmadd52luq 96(%rcx),%ymm2,%ymm6 vpmadd52luq 128(%rcx),%ymm2,%ymm7 vpmadd52luq 160(%rcx),%ymm2,%ymm8 vpmadd52luq 192(%rcx),%ymm2,%ymm9 vpmadd52luq 224(%rcx),%ymm2,%ymm10 valignq $1,%ymm3,%ymm4,%ymm3 valignq $1,%ymm4,%ymm5,%ymm4 valignq $1,%ymm5,%ymm6,%ymm5 valignq $1,%ymm6,%ymm7,%ymm6 valignq $1,%ymm7,%ymm8,%ymm7 valignq $1,%ymm8,%ymm9,%ymm8 valignq $1,%ymm9,%ymm10,%ymm9 valignq $1,%ymm10,%ymm0,%ymm10 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm4 vpmadd52huq 64(%rsi),%ymm1,%ymm5 vpmadd52huq 96(%rsi),%ymm1,%ymm6 vpmadd52huq 128(%rsi),%ymm1,%ymm7 vpmadd52huq 160(%rsi),%ymm1,%ymm8 vpmadd52huq 192(%rsi),%ymm1,%ymm9 vpmadd52huq 224(%rsi),%ymm1,%ymm10 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm4 vpmadd52huq 64(%rcx),%ymm2,%ymm5 vpmadd52huq 96(%rcx),%ymm2,%ymm6 vpmadd52huq 128(%rcx),%ymm2,%ymm7 vpmadd52huq 160(%rcx),%ymm2,%ymm8 vpmadd52huq 192(%rcx),%ymm2,%ymm9 vpmadd52huq 224(%rcx),%ymm2,%ymm10 movq 256(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 256(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 256(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq 
%r10,%r15 vpmadd52luq 256(%rsi),%ymm1,%ymm11 vpmadd52luq 288(%rsi),%ymm1,%ymm12 vpmadd52luq 320(%rsi),%ymm1,%ymm13 vpmadd52luq 352(%rsi),%ymm1,%ymm14 vpmadd52luq 384(%rsi),%ymm1,%ymm15 vpmadd52luq 416(%rsi),%ymm1,%ymm16 vpmadd52luq 448(%rsi),%ymm1,%ymm17 vpmadd52luq 480(%rsi),%ymm1,%ymm18 vpmadd52luq 256(%rcx),%ymm2,%ymm11 vpmadd52luq 288(%rcx),%ymm2,%ymm12 vpmadd52luq 320(%rcx),%ymm2,%ymm13 vpmadd52luq 352(%rcx),%ymm2,%ymm14 vpmadd52luq 384(%rcx),%ymm2,%ymm15 vpmadd52luq 416(%rcx),%ymm2,%ymm16 vpmadd52luq 448(%rcx),%ymm2,%ymm17 vpmadd52luq 480(%rcx),%ymm2,%ymm18 valignq $1,%ymm11,%ymm12,%ymm11 valignq $1,%ymm12,%ymm13,%ymm12 valignq $1,%ymm13,%ymm14,%ymm13 valignq $1,%ymm14,%ymm15,%ymm14 valignq $1,%ymm15,%ymm16,%ymm15 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm0,%ymm18 vmovq %xmm11,%r13 addq %r13,%r15 vpmadd52huq 256(%rsi),%ymm1,%ymm11 vpmadd52huq 288(%rsi),%ymm1,%ymm12 vpmadd52huq 320(%rsi),%ymm1,%ymm13 vpmadd52huq 352(%rsi),%ymm1,%ymm14 vpmadd52huq 384(%rsi),%ymm1,%ymm15 vpmadd52huq 416(%rsi),%ymm1,%ymm16 vpmadd52huq 448(%rsi),%ymm1,%ymm17 vpmadd52huq 480(%rsi),%ymm1,%ymm18 vpmadd52huq 256(%rcx),%ymm2,%ymm11 vpmadd52huq 288(%rcx),%ymm2,%ymm12 vpmadd52huq 320(%rcx),%ymm2,%ymm13 vpmadd52huq 352(%rcx),%ymm2,%ymm14 vpmadd52huq 384(%rcx),%ymm2,%ymm15 vpmadd52huq 416(%rcx),%ymm2,%ymm16 vpmadd52huq 448(%rcx),%ymm2,%ymm17 vpmadd52huq 480(%rcx),%ymm2,%ymm18 leaq 8(%r11),%r11 decl %ebx jne .Lloop30 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm4,%ymm1 vpsrlq $52,%ymm5,%ymm2 vpsrlq $52,%ymm6,%ymm19 vpsrlq $52,%ymm7,%ymm20 vpsrlq $52,%ymm8,%ymm21 vpsrlq $52,%ymm9,%ymm22 vpsrlq $52,%ymm10,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm4,%ymm4 vpaddq %ymm2,%ymm5,%ymm5 vpaddq %ymm19,%ymm6,%ymm6 vpaddq %ymm20,%ymm7,%ymm7 vpaddq %ymm21,%ymm8,%ymm8 vpaddq %ymm22,%ymm9,%ymm9 vpaddq %ymm23,%ymm10,%ymm10 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm10,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm4,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm5,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm6,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm7,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm8,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm9,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm10,%k2 kmovb 
%k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm4,%ymm4{%k2} vpsubq .Lmask52x4(%rip),%ymm5,%ymm5{%k3} vpsubq .Lmask52x4(%rip),%ymm6,%ymm6{%k4} vpsubq .Lmask52x4(%rip),%ymm7,%ymm7{%k5} vpsubq .Lmask52x4(%rip),%ymm8,%ymm8{%k6} vpsubq .Lmask52x4(%rip),%ymm9,%ymm9{%k7} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm5,%ymm5 vpandq .Lmask52x4(%rip),%ymm6,%ymm6 vpandq .Lmask52x4(%rip),%ymm7,%ymm7 vpandq .Lmask52x4(%rip),%ymm8,%ymm8 vpandq .Lmask52x4(%rip),%ymm9,%ymm9 shrb $4,%r11b kmovb %r11d,%k1 vpsubq .Lmask52x4(%rip),%ymm10,%ymm10{%k1} vpandq .Lmask52x4(%rip),%ymm10,%ymm10 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm11,%ymm11 vpsrlq $52,%ymm11,%ymm0 vpsrlq $52,%ymm12,%ymm1 vpsrlq $52,%ymm13,%ymm2 vpsrlq $52,%ymm14,%ymm19 vpsrlq $52,%ymm15,%ymm20 vpsrlq $52,%ymm16,%ymm21 vpsrlq $52,%ymm17,%ymm22 vpsrlq $52,%ymm18,%ymm23 valignq $3,%ymm22,%ymm23,%ymm23 valignq $3,%ymm21,%ymm22,%ymm22 valignq $3,%ymm20,%ymm21,%ymm21 valignq $3,%ymm19,%ymm20,%ymm20 valignq $3,%ymm2,%ymm19,%ymm19 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vpandq .Lmask52x4(%rip),%ymm13,%ymm13 vpandq .Lmask52x4(%rip),%ymm14,%ymm14 vpandq .Lmask52x4(%rip),%ymm15,%ymm15 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpaddq %ymm0,%ymm11,%ymm11 vpaddq %ymm1,%ymm12,%ymm12 vpaddq %ymm2,%ymm13,%ymm13 vpaddq %ymm19,%ymm14,%ymm14 vpaddq %ymm20,%ymm15,%ymm15 vpaddq %ymm21,%ymm16,%ymm16 vpaddq %ymm22,%ymm17,%ymm17 vpaddq %ymm23,%ymm18,%ymm18 vpcmpuq $6,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%r14d kmovb %k2,%r13d shlb $4,%r13b orb %r13b,%r14b vpcmpuq $6,.Lmask52x4(%rip),%ymm13,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm14,%k2 kmovb %k1,%r13d kmovb %k2,%r12d shlb $4,%r12b orb %r12b,%r13b vpcmpuq $6,.Lmask52x4(%rip),%ymm15,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm16,%k2 kmovb %k1,%r12d kmovb %k2,%r11d shlb $4,%r11b orb %r11b,%r12b vpcmpuq $6,.Lmask52x4(%rip),%ymm17,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm18,%k2 kmovb %k1,%r11d kmovb %k2,%r10d shlb $4,%r10b orb %r10b,%r11b addb %r14b,%r14b adcb %r13b,%r13b adcb %r12b,%r12b adcb %r11b,%r11b vpcmpuq $0,.Lmask52x4(%rip),%ymm11,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm12,%k2 kmovb %k1,%r9d kmovb %k2,%r8d shlb $4,%r8b orb %r8b,%r9b vpcmpuq $0,.Lmask52x4(%rip),%ymm13,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm14,%k2 kmovb %k1,%r8d kmovb %k2,%edx shlb $4,%dl orb %dl,%r8b vpcmpuq $0,.Lmask52x4(%rip),%ymm15,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm16,%k2 kmovb %k1,%edx kmovb %k2,%ecx shlb $4,%cl orb %cl,%dl vpcmpuq $0,.Lmask52x4(%rip),%ymm17,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm18,%k2 kmovb %k1,%ecx kmovb %k2,%ebx shlb $4,%bl orb %bl,%cl addb %r9b,%r14b adcb %r8b,%r13b adcb %dl,%r12b adcb %cl,%r11b xorb %r9b,%r14b xorb %r8b,%r13b xorb %dl,%r12b xorb %cl,%r11b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r13d,%k3 shrb $4,%r13b kmovb %r13d,%k4 kmovb %r12d,%k5 shrb $4,%r12b kmovb %r12d,%k6 kmovb %r11d,%k7 vpsubq .Lmask52x4(%rip),%ymm11,%ymm11{%k1} vpsubq .Lmask52x4(%rip),%ymm12,%ymm12{%k2} vpsubq 
.Lmask52x4(%rip),%ymm13,%ymm13{%k3} vpsubq .Lmask52x4(%rip),%ymm14,%ymm14{%k4} vpsubq .Lmask52x4(%rip),%ymm15,%ymm15{%k5} vpsubq .Lmask52x4(%rip),%ymm16,%ymm16{%k6} vpsubq .Lmask52x4(%rip),%ymm17,%ymm17{%k7} vpandq .Lmask52x4(%rip),%ymm11,%ymm11 vpandq .Lmask52x4(%rip),%ymm12,%ymm12 vpandq .Lmask52x4(%rip),%ymm13,%ymm13 vpandq .Lmask52x4(%rip),%ymm14,%ymm14 vpandq .Lmask52x4(%rip),%ymm15,%ymm15 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 shrb $4,%r11b kmovb %r11d,%k1 vpsubq .Lmask52x4(%rip),%ymm18,%ymm18{%k1} vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm4,32(%rdi) vmovdqu64 %ymm5,64(%rdi) vmovdqu64 %ymm6,96(%rdi) vmovdqu64 %ymm7,128(%rdi) vmovdqu64 %ymm8,160(%rdi) vmovdqu64 %ymm9,192(%rdi) vmovdqu64 %ymm10,224(%rdi) vmovdqu64 %ymm11,256(%rdi) vmovdqu64 %ymm12,288(%rdi) vmovdqu64 %ymm13,320(%rdi) vmovdqu64 %ymm14,352(%rdi) vmovdqu64 %ymm15,384(%rdi) vmovdqu64 %ymm16,416(%rdi) vmovdqu64 %ymm17,448(%rdi) vmovdqu64 %ymm18,480(%rdi) vzeroupper leaq (%rsp),%rax .cfi_def_cfa_register %rax movq 0(%rax),%r15 .cfi_restore %r15 movq 8(%rax),%r14 .cfi_restore %r14 movq 16(%rax),%r13 .cfi_restore %r13 movq 24(%rax),%r12 .cfi_restore %r12 movq 32(%rax),%rbp .cfi_restore %rbp movq 40(%rax),%rbx .cfi_restore %rbx leaq 48(%rax),%rsp .cfi_def_cfa %rsp,8 .Lrsaz_amm52x30_x2_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x30_x2_ifma256, .-rsaz_amm52x30_x2_ifma256 .text .align 32 .globl extract_multiplier_2x30_win5 .hidden extract_multiplier_2x30_win5 .type extract_multiplier_2x30_win5,@function extract_multiplier_2x30_win5: .cfi_startproc .byte 243,15,30,250 vmovdqa64 .Lones(%rip),%ymm30 vpbroadcastq %rdx,%ymm28 vpbroadcastq %rcx,%ymm29 leaq 16384(%rsi),%rax vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm27 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 vmovdqa64 %ymm0,%ymm23 vmovdqa64 %ymm0,%ymm24 vmovdqa64 %ymm0,%ymm25 .align 32 .Lloop: vpcmpq $0,%ymm27,%ymm28,%k1 vpcmpq $0,%ymm27,%ymm29,%k2 vmovdqu64 0(%rsi),%ymm26 vpblendmq %ymm26,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm26 vpblendmq %ymm26,%ymm1,%ymm1{%k1} vmovdqu64 64(%rsi),%ymm26 vpblendmq %ymm26,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm26 vpblendmq %ymm26,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm26 vpblendmq %ymm26,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm26 vpblendmq %ymm26,%ymm5,%ymm5{%k1} vmovdqu64 192(%rsi),%ymm26 vpblendmq %ymm26,%ymm16,%ymm16{%k1} vmovdqu64 224(%rsi),%ymm26 vpblendmq %ymm26,%ymm17,%ymm17{%k1} vmovdqu64 256(%rsi),%ymm26 vpblendmq %ymm26,%ymm18,%ymm18{%k2} vmovdqu64 288(%rsi),%ymm26 vpblendmq %ymm26,%ymm19,%ymm19{%k2} vmovdqu64 320(%rsi),%ymm26 vpblendmq %ymm26,%ymm20,%ymm20{%k2} vmovdqu64 352(%rsi),%ymm26 vpblendmq %ymm26,%ymm21,%ymm21{%k2} vmovdqu64 384(%rsi),%ymm26 vpblendmq %ymm26,%ymm22,%ymm22{%k2} vmovdqu64 416(%rsi),%ymm26 vpblendmq %ymm26,%ymm23,%ymm23{%k2} vmovdqu64 448(%rsi),%ymm26 vpblendmq %ymm26,%ymm24,%ymm24{%k2} vmovdqu64 480(%rsi),%ymm26 vpblendmq %ymm26,%ymm25,%ymm25{%k2} vpaddq %ymm30,%ymm27,%ymm27 addq $512,%rsi cmpq %rsi,%rax jne .Lloop vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 %ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) vmovdqu64 %ymm20,320(%rdi) 
vmovdqu64 %ymm21,352(%rdi) vmovdqu64 %ymm22,384(%rdi) vmovdqu64 %ymm23,416(%rdi) vmovdqu64 %ymm24,448(%rdi) vmovdqu64 %ymm25,480(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size extract_multiplier_2x30_win5, .-extract_multiplier_2x30_win5 .section .rodata .align 32 .Lones: .quad 1,1,1,1 .Lzeros: .quad 0,0,0,0 .text #endif #endif
marvin-hansen/iggy-streaming-system
48,667
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/sha512-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl sha512_block_data_order_nohw .hidden sha512_block_data_order_nohw .type sha512_block_data_order_nohw,@function .align 16 sha512_block_data_order_nohw: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 shlq $4,%rdx subq $128+32,%rsp leaq (%rsi,%rdx,8),%rdx andq $-64,%rsp movq %rdi,128+0(%rsp) movq %rsi,128+8(%rsp) movq %rdx,128+16(%rsp) movq %rax,152(%rsp) .cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08 .Lprologue: movq 0(%rdi),%rax movq 8(%rdi),%rbx movq 16(%rdi),%rcx movq 24(%rdi),%rdx movq 32(%rdi),%r8 movq 40(%rdi),%r9 movq 48(%rdi),%r10 movq 56(%rdi),%r11 jmp .Lloop .align 16 .Lloop: movq %rbx,%rdi leaq K512(%rip),%rbp xorq %rcx,%rdi movq 0(%rsi),%r12 movq %r8,%r13 movq %rax,%r14 bswapq %r12 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,0(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp addq %r14,%r11 movq 8(%rsi),%r12 movq %rdx,%r13 movq %r11,%r14 bswapq %r12 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,8(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp addq %r14,%r10 movq 16(%rsi),%r12 movq %rcx,%r13 movq %r10,%r14 bswapq %r12 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,16(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp addq %r14,%r9 movq 24(%rsi),%r12 movq %rbx,%r13 movq %r9,%r14 bswapq %r12 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,24(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp addq %r14,%r8 movq 32(%rsi),%r12 movq %rax,%r13 movq %r8,%r14 bswapq %r12 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,32(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp addq %r14,%rdx movq 40(%rsi),%r12 movq %r11,%r13 movq %rdx,%r14 bswapq %r12 rorq $23,%r13 movq 
%rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,40(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp addq %r14,%rcx movq 48(%rsi),%r12 movq %r10,%r13 movq %rcx,%r14 bswapq %r12 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,48(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp addq %r14,%rbx movq 56(%rsi),%r12 movq %r9,%r13 movq %rbx,%r14 bswapq %r12 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,56(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp addq %r14,%rax movq 64(%rsi),%r12 movq %r8,%r13 movq %rax,%r14 bswapq %r12 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,64(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp addq %r14,%r11 movq 72(%rsi),%r12 movq %rdx,%r13 movq %r11,%r14 bswapq %r12 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,72(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp addq %r14,%r10 movq 80(%rsi),%r12 movq %rcx,%r13 movq %r10,%r14 bswapq %r12 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,80(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp addq %r14,%r9 movq 88(%rsi),%r12 movq %rbx,%r13 movq %r9,%r14 bswapq %r12 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,88(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp addq %r14,%r8 movq 96(%rsi),%r12 movq %rax,%r13 movq %r8,%r14 bswapq %r12 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,96(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq 
(%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp addq %r14,%rdx movq 104(%rsi),%r12 movq %r11,%r13 movq %rdx,%r14 bswapq %r12 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,104(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp addq %r14,%rcx movq 112(%rsi),%r12 movq %r10,%r13 movq %rcx,%r14 bswapq %r12 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,112(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp addq %r14,%rbx movq 120(%rsi),%r12 movq %r9,%r13 movq %rbx,%r14 bswapq %r12 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,120(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp jmp .Lrounds_16_xx .align 16 .Lrounds_16_xx: movq 8(%rsp),%r13 movq 112(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rax movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 72(%rsp),%r12 addq 0(%rsp),%r12 movq %r8,%r13 addq %r15,%r12 movq %rax,%r14 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,0(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp movq 16(%rsp),%r13 movq 120(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r11 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 80(%rsp),%r12 addq 8(%rsp),%r12 movq %rdx,%r13 addq %rdi,%r12 movq %r11,%r14 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,8(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp movq 24(%rsp),%r13 movq 0(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r10 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 88(%rsp),%r12 addq 16(%rsp),%r12 movq %rcx,%r13 addq %r15,%r12 movq %r10,%r14 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,16(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 
xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp movq 32(%rsp),%r13 movq 8(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r9 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 96(%rsp),%r12 addq 24(%rsp),%r12 movq %rbx,%r13 addq %rdi,%r12 movq %r9,%r14 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,24(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp movq 40(%rsp),%r13 movq 16(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r8 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 104(%rsp),%r12 addq 32(%rsp),%r12 movq %rax,%r13 addq %r15,%r12 movq %r8,%r14 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,32(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp movq 48(%rsp),%r13 movq 24(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rdx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 112(%rsp),%r12 addq 40(%rsp),%r12 movq %r11,%r13 addq %rdi,%r12 movq %rdx,%r14 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,40(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp movq 56(%rsp),%r13 movq 32(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rcx movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 120(%rsp),%r12 addq 48(%rsp),%r12 movq %r10,%r13 addq %r15,%r12 movq %rcx,%r14 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,48(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp movq 64(%rsp),%r13 movq 40(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rbx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 0(%rsp),%r12 addq 56(%rsp),%r12 movq %r9,%r13 addq %rdi,%r12 movq %rbx,%r14 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,56(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq 
%rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp movq 72(%rsp),%r13 movq 48(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rax movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 8(%rsp),%r12 addq 64(%rsp),%r12 movq %r8,%r13 addq %r15,%r12 movq %rax,%r14 rorq $23,%r13 movq %r9,%r15 xorq %r8,%r13 rorq $5,%r14 xorq %r10,%r15 movq %r12,64(%rsp) xorq %rax,%r14 andq %r8,%r15 rorq $4,%r13 addq %r11,%r12 xorq %r10,%r15 rorq $6,%r14 xorq %r8,%r13 addq %r15,%r12 movq %rax,%r15 addq (%rbp),%r12 xorq %rax,%r14 xorq %rbx,%r15 rorq $14,%r13 movq %rbx,%r11 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r11 addq %r12,%rdx addq %r12,%r11 leaq 8(%rbp),%rbp movq 80(%rsp),%r13 movq 56(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r11 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 16(%rsp),%r12 addq 72(%rsp),%r12 movq %rdx,%r13 addq %rdi,%r12 movq %r11,%r14 rorq $23,%r13 movq %r8,%rdi xorq %rdx,%r13 rorq $5,%r14 xorq %r9,%rdi movq %r12,72(%rsp) xorq %r11,%r14 andq %rdx,%rdi rorq $4,%r13 addq %r10,%r12 xorq %r9,%rdi rorq $6,%r14 xorq %rdx,%r13 addq %rdi,%r12 movq %r11,%rdi addq (%rbp),%r12 xorq %r11,%r14 xorq %rax,%rdi rorq $14,%r13 movq %rax,%r10 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r10 addq %r12,%rcx addq %r12,%r10 leaq 24(%rbp),%rbp movq 88(%rsp),%r13 movq 64(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r10 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 24(%rsp),%r12 addq 80(%rsp),%r12 movq %rcx,%r13 addq %r15,%r12 movq %r10,%r14 rorq $23,%r13 movq %rdx,%r15 xorq %rcx,%r13 rorq $5,%r14 xorq %r8,%r15 movq %r12,80(%rsp) xorq %r10,%r14 andq %rcx,%r15 rorq $4,%r13 addq %r9,%r12 xorq %r8,%r15 rorq $6,%r14 xorq %rcx,%r13 addq %r15,%r12 movq %r10,%r15 addq (%rbp),%r12 xorq %r10,%r14 xorq %r11,%r15 rorq $14,%r13 movq %r11,%r9 andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%r9 addq %r12,%rbx addq %r12,%r9 leaq 8(%rbp),%rbp movq 96(%rsp),%r13 movq 72(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%r9 movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 32(%rsp),%r12 addq 88(%rsp),%r12 movq %rbx,%r13 addq %rdi,%r12 movq %r9,%r14 rorq $23,%r13 movq %rcx,%rdi xorq %rbx,%r13 rorq $5,%r14 xorq %rdx,%rdi movq %r12,88(%rsp) xorq %r9,%r14 andq %rbx,%rdi rorq $4,%r13 addq %r8,%r12 xorq %rdx,%rdi rorq $6,%r14 xorq %rbx,%r13 addq %rdi,%r12 movq %r9,%rdi addq (%rbp),%r12 xorq %r9,%r14 xorq %r10,%rdi rorq $14,%r13 movq %r10,%r8 andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%r8 addq %r12,%rax addq %r12,%r8 leaq 24(%rbp),%rbp movq 104(%rsp),%r13 movq 80(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%r8 movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 40(%rsp),%r12 addq 96(%rsp),%r12 movq %rax,%r13 addq %r15,%r12 movq %r8,%r14 rorq $23,%r13 movq %rbx,%r15 xorq %rax,%r13 rorq $5,%r14 xorq %rcx,%r15 movq %r12,96(%rsp) xorq %r8,%r14 andq %rax,%r15 rorq $4,%r13 addq %rdx,%r12 xorq %rcx,%r15 rorq $6,%r14 xorq %rax,%r13 addq %r15,%r12 movq %r8,%r15 
addq (%rbp),%r12 xorq %r8,%r14 xorq %r9,%r15 rorq $14,%r13 movq %r9,%rdx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rdx addq %r12,%r11 addq %r12,%rdx leaq 8(%rbp),%rbp movq 112(%rsp),%r13 movq 88(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rdx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 48(%rsp),%r12 addq 104(%rsp),%r12 movq %r11,%r13 addq %rdi,%r12 movq %rdx,%r14 rorq $23,%r13 movq %rax,%rdi xorq %r11,%r13 rorq $5,%r14 xorq %rbx,%rdi movq %r12,104(%rsp) xorq %rdx,%r14 andq %r11,%rdi rorq $4,%r13 addq %rcx,%r12 xorq %rbx,%rdi rorq $6,%r14 xorq %r11,%r13 addq %rdi,%r12 movq %rdx,%rdi addq (%rbp),%r12 xorq %rdx,%r14 xorq %r8,%rdi rorq $14,%r13 movq %r8,%rcx andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rcx addq %r12,%r10 addq %r12,%rcx leaq 24(%rbp),%rbp movq 120(%rsp),%r13 movq 96(%rsp),%r15 movq %r13,%r12 rorq $7,%r13 addq %r14,%rcx movq %r15,%r14 rorq $42,%r15 xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%r15 shrq $6,%r14 rorq $19,%r15 xorq %r13,%r12 xorq %r14,%r15 addq 56(%rsp),%r12 addq 112(%rsp),%r12 movq %r10,%r13 addq %r15,%r12 movq %rcx,%r14 rorq $23,%r13 movq %r11,%r15 xorq %r10,%r13 rorq $5,%r14 xorq %rax,%r15 movq %r12,112(%rsp) xorq %rcx,%r14 andq %r10,%r15 rorq $4,%r13 addq %rbx,%r12 xorq %rax,%r15 rorq $6,%r14 xorq %r10,%r13 addq %r15,%r12 movq %rcx,%r15 addq (%rbp),%r12 xorq %rcx,%r14 xorq %rdx,%r15 rorq $14,%r13 movq %rdx,%rbx andq %r15,%rdi rorq $28,%r14 addq %r13,%r12 xorq %rdi,%rbx addq %r12,%r9 addq %r12,%rbx leaq 8(%rbp),%rbp movq 0(%rsp),%r13 movq 104(%rsp),%rdi movq %r13,%r12 rorq $7,%r13 addq %r14,%rbx movq %rdi,%r14 rorq $42,%rdi xorq %r12,%r13 shrq $7,%r12 rorq $1,%r13 xorq %r14,%rdi shrq $6,%r14 rorq $19,%rdi xorq %r13,%r12 xorq %r14,%rdi addq 64(%rsp),%r12 addq 120(%rsp),%r12 movq %r9,%r13 addq %rdi,%r12 movq %rbx,%r14 rorq $23,%r13 movq %r10,%rdi xorq %r9,%r13 rorq $5,%r14 xorq %r11,%rdi movq %r12,120(%rsp) xorq %rbx,%r14 andq %r9,%rdi rorq $4,%r13 addq %rax,%r12 xorq %r11,%rdi rorq $6,%r14 xorq %r9,%r13 addq %rdi,%r12 movq %rbx,%rdi addq (%rbp),%r12 xorq %rbx,%r14 xorq %rcx,%rdi rorq $14,%r13 movq %rcx,%rax andq %rdi,%r15 rorq $28,%r14 addq %r13,%r12 xorq %r15,%rax addq %r12,%r8 addq %r12,%rax leaq 24(%rbp),%rbp cmpb $0,7(%rbp) jnz .Lrounds_16_xx movq 128+0(%rsp),%rdi addq %r14,%rax leaq 128(%rsi),%rsi addq 0(%rdi),%rax addq 8(%rdi),%rbx addq 16(%rdi),%rcx addq 24(%rdi),%rdx addq 32(%rdi),%r8 addq 40(%rdi),%r9 addq 48(%rdi),%r10 addq 56(%rdi),%r11 cmpq 128+16(%rsp),%rsi movq %rax,0(%rdi) movq %rbx,8(%rdi) movq %rcx,16(%rdi) movq %rdx,24(%rdi) movq %r8,32(%rdi) movq %r9,40(%rdi) movq %r10,48(%rdi) movq %r11,56(%rdi) jb .Lloop movq 152(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue: .byte 0xf3,0xc3 .cfi_endproc .size sha512_block_data_order_nohw,.-sha512_block_data_order_nohw .section .rodata .align 64 .type K512,@object K512: .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 
0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0x0001020304050607,0x08090a0b0c0d0e0f .quad 0x0001020304050607,0x08090a0b0c0d0e0f .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl 
sha512_block_data_order_avx .hidden sha512_block_data_order_avx .type sha512_block_data_order_avx,@function .align 64 sha512_block_data_order_avx: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 shlq $4,%rdx subq $160,%rsp leaq (%rsi,%rdx,8),%rdx andq $-64,%rsp movq %rdi,128+0(%rsp) movq %rsi,128+8(%rsp) movq %rdx,128+16(%rsp) movq %rax,152(%rsp) .cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08 .Lprologue_avx: vzeroupper movq 0(%rdi),%rax movq 8(%rdi),%rbx movq 16(%rdi),%rcx movq 24(%rdi),%rdx movq 32(%rdi),%r8 movq 40(%rdi),%r9 movq 48(%rdi),%r10 movq 56(%rdi),%r11 jmp .Lloop_avx .align 16 .Lloop_avx: vmovdqa K512+1280(%rip),%xmm11 vmovdqu 0(%rsi),%xmm0 leaq K512+128(%rip),%rbp vmovdqu 16(%rsi),%xmm1 vmovdqu 32(%rsi),%xmm2 vpshufb %xmm11,%xmm0,%xmm0 vmovdqu 48(%rsi),%xmm3 vpshufb %xmm11,%xmm1,%xmm1 vmovdqu 64(%rsi),%xmm4 vpshufb %xmm11,%xmm2,%xmm2 vmovdqu 80(%rsi),%xmm5 vpshufb %xmm11,%xmm3,%xmm3 vmovdqu 96(%rsi),%xmm6 vpshufb %xmm11,%xmm4,%xmm4 vmovdqu 112(%rsi),%xmm7 vpshufb %xmm11,%xmm5,%xmm5 vpaddq -128(%rbp),%xmm0,%xmm8 vpshufb %xmm11,%xmm6,%xmm6 vpaddq -96(%rbp),%xmm1,%xmm9 vpshufb %xmm11,%xmm7,%xmm7 vpaddq -64(%rbp),%xmm2,%xmm10 vpaddq -32(%rbp),%xmm3,%xmm11 vmovdqa %xmm8,0(%rsp) vpaddq 0(%rbp),%xmm4,%xmm8 vmovdqa %xmm9,16(%rsp) vpaddq 32(%rbp),%xmm5,%xmm9 vmovdqa %xmm10,32(%rsp) vpaddq 64(%rbp),%xmm6,%xmm10 vmovdqa %xmm11,48(%rsp) vpaddq 96(%rbp),%xmm7,%xmm11 vmovdqa %xmm8,64(%rsp) movq %rax,%r14 vmovdqa %xmm9,80(%rsp) movq %rbx,%rdi vmovdqa %xmm10,96(%rsp) xorq %rcx,%rdi vmovdqa %xmm11,112(%rsp) movq %r8,%r13 jmp .Lavx_00_47 .align 16 .Lavx_00_47: addq $256,%rbp vpalignr $8,%xmm0,%xmm1,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rax vpalignr $8,%xmm4,%xmm5,%xmm11 movq %r9,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r8,%r13 xorq %r10,%r12 vpaddq %xmm11,%xmm0,%xmm0 shrdq $4,%r13,%r13 xorq %rax,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r8,%r12 xorq %r8,%r13 vpsllq $56,%xmm8,%xmm9 addq 0(%rsp),%r11 movq %rax,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r10,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rbx,%r15 addq %r12,%r11 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rax,%r14 addq %r13,%r11 vpxor %xmm10,%xmm8,%xmm8 xorq %rbx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm7,%xmm11 addq %r11,%rdx addq %rdi,%r11 vpxor %xmm9,%xmm8,%xmm8 movq %rdx,%r13 addq %r11,%r14 vpsllq $3,%xmm7,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r11 vpaddq %xmm8,%xmm0,%xmm0 movq %r8,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm7,%xmm9 xorq %rdx,%r13 xorq %r9,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r11,%r14 vpsllq $42,%xmm10,%xmm10 andq %rdx,%r12 xorq %rdx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 8(%rsp),%r10 movq %r11,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r9,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rax,%rdi addq %r12,%r10 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm0,%xmm0 xorq %r11,%r14 addq %r13,%r10 vpaddq -128(%rbp),%xmm0,%xmm10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 vmovdqa %xmm10,0(%rsp) vpalignr $8,%xmm1,%xmm2,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r10 vpalignr $8,%xmm5,%xmm6,%xmm11 movq %rdx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rcx,%r13 xorq %r8,%r12 vpaddq %xmm11,%xmm1,%xmm1 shrdq $4,%r13,%r13 xorq %r10,%r14 vpsrlq $7,%xmm8,%xmm11 andq 
%rcx,%r12 xorq %rcx,%r13 vpsllq $56,%xmm8,%xmm9 addq 16(%rsp),%r9 movq %r10,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r8,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r11,%r15 addq %r12,%r9 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r10,%r14 addq %r13,%r9 vpxor %xmm10,%xmm8,%xmm8 xorq %r11,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm0,%xmm11 addq %r9,%rbx addq %rdi,%r9 vpxor %xmm9,%xmm8,%xmm8 movq %rbx,%r13 addq %r9,%r14 vpsllq $3,%xmm0,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r9 vpaddq %xmm8,%xmm1,%xmm1 movq %rcx,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm0,%xmm9 xorq %rbx,%r13 xorq %rdx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r9,%r14 vpsllq $42,%xmm10,%xmm10 andq %rbx,%r12 xorq %rbx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 24(%rsp),%r8 movq %r9,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rdx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r10,%rdi addq %r12,%r8 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm1,%xmm1 xorq %r9,%r14 addq %r13,%r8 vpaddq -96(%rbp),%xmm1,%xmm10 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 vmovdqa %xmm10,16(%rsp) vpalignr $8,%xmm2,%xmm3,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r8 vpalignr $8,%xmm6,%xmm7,%xmm11 movq %rbx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rax,%r13 xorq %rcx,%r12 vpaddq %xmm11,%xmm2,%xmm2 shrdq $4,%r13,%r13 xorq %r8,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rax,%r12 xorq %rax,%r13 vpsllq $56,%xmm8,%xmm9 addq 32(%rsp),%rdx movq %r8,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rcx,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r9,%r15 addq %r12,%rdx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r8,%r14 addq %r13,%rdx vpxor %xmm10,%xmm8,%xmm8 xorq %r9,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm1,%xmm11 addq %rdx,%r11 addq %rdi,%rdx vpxor %xmm9,%xmm8,%xmm8 movq %r11,%r13 addq %rdx,%r14 vpsllq $3,%xmm1,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rdx vpaddq %xmm8,%xmm2,%xmm2 movq %rax,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm1,%xmm9 xorq %r11,%r13 xorq %rbx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rdx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r11,%r12 xorq %r11,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 40(%rsp),%rcx movq %rdx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rbx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r8,%rdi addq %r12,%rcx vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm2,%xmm2 xorq %rdx,%r14 addq %r13,%rcx vpaddq -64(%rbp),%xmm2,%xmm10 xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 vmovdqa %xmm10,32(%rsp) vpalignr $8,%xmm3,%xmm4,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rcx vpalignr $8,%xmm7,%xmm0,%xmm11 movq %r11,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r10,%r13 xorq %rax,%r12 vpaddq %xmm11,%xmm3,%xmm3 shrdq $4,%r13,%r13 xorq %rcx,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r10,%r12 xorq %r10,%r13 vpsllq $56,%xmm8,%xmm9 addq 48(%rsp),%rbx movq %rcx,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rax,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rdx,%r15 addq %r12,%rbx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rcx,%r14 addq %r13,%rbx vpxor %xmm10,%xmm8,%xmm8 xorq %rdx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm2,%xmm11 addq %rbx,%r9 addq %rdi,%rbx vpxor %xmm9,%xmm8,%xmm8 movq %r9,%r13 addq %rbx,%r14 vpsllq $3,%xmm2,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rbx vpaddq %xmm8,%xmm3,%xmm3 movq %r10,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm2,%xmm9 xorq %r9,%r13 xorq 
%r11,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rbx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r9,%r12 xorq %r9,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 56(%rsp),%rax movq %rbx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r11,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rcx,%rdi addq %r12,%rax vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm3,%xmm3 xorq %rbx,%r14 addq %r13,%rax vpaddq -32(%rbp),%xmm3,%xmm10 xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 vmovdqa %xmm10,48(%rsp) vpalignr $8,%xmm4,%xmm5,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rax vpalignr $8,%xmm0,%xmm1,%xmm11 movq %r9,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r8,%r13 xorq %r10,%r12 vpaddq %xmm11,%xmm4,%xmm4 shrdq $4,%r13,%r13 xorq %rax,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r8,%r12 xorq %r8,%r13 vpsllq $56,%xmm8,%xmm9 addq 64(%rsp),%r11 movq %rax,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r10,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rbx,%r15 addq %r12,%r11 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rax,%r14 addq %r13,%r11 vpxor %xmm10,%xmm8,%xmm8 xorq %rbx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm3,%xmm11 addq %r11,%rdx addq %rdi,%r11 vpxor %xmm9,%xmm8,%xmm8 movq %rdx,%r13 addq %r11,%r14 vpsllq $3,%xmm3,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r11 vpaddq %xmm8,%xmm4,%xmm4 movq %r8,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm3,%xmm9 xorq %rdx,%r13 xorq %r9,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r11,%r14 vpsllq $42,%xmm10,%xmm10 andq %rdx,%r12 xorq %rdx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 72(%rsp),%r10 movq %r11,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r9,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rax,%rdi addq %r12,%r10 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm4,%xmm4 xorq %r11,%r14 addq %r13,%r10 vpaddq 0(%rbp),%xmm4,%xmm10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 vmovdqa %xmm10,64(%rsp) vpalignr $8,%xmm5,%xmm6,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r10 vpalignr $8,%xmm1,%xmm2,%xmm11 movq %rdx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rcx,%r13 xorq %r8,%r12 vpaddq %xmm11,%xmm5,%xmm5 shrdq $4,%r13,%r13 xorq %r10,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rcx,%r12 xorq %rcx,%r13 vpsllq $56,%xmm8,%xmm9 addq 80(%rsp),%r9 movq %r10,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %r8,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r11,%r15 addq %r12,%r9 vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r10,%r14 addq %r13,%r9 vpxor %xmm10,%xmm8,%xmm8 xorq %r11,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm4,%xmm11 addq %r9,%rbx addq %rdi,%r9 vpxor %xmm9,%xmm8,%xmm8 movq %rbx,%r13 addq %r9,%r14 vpsllq $3,%xmm4,%xmm10 shrdq $23,%r13,%r13 movq %r14,%r9 vpaddq %xmm8,%xmm5,%xmm5 movq %rcx,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm4,%xmm9 xorq %rbx,%r13 xorq %rdx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %r9,%r14 vpsllq $42,%xmm10,%xmm10 andq %rbx,%r12 xorq %rbx,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 88(%rsp),%r8 movq %r9,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rdx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r10,%rdi addq %r12,%r8 vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm5,%xmm5 xorq %r9,%r14 addq %r13,%r8 vpaddq 32(%rbp),%xmm5,%xmm10 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 vmovdqa %xmm10,80(%rsp) vpalignr $8,%xmm6,%xmm7,%xmm8 shrdq $23,%r13,%r13 movq %r14,%r8 vpalignr 
$8,%xmm2,%xmm3,%xmm11 movq %rbx,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %rax,%r13 xorq %rcx,%r12 vpaddq %xmm11,%xmm6,%xmm6 shrdq $4,%r13,%r13 xorq %r8,%r14 vpsrlq $7,%xmm8,%xmm11 andq %rax,%r12 xorq %rax,%r13 vpsllq $56,%xmm8,%xmm9 addq 96(%rsp),%rdx movq %r8,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rcx,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %r9,%r15 addq %r12,%rdx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %r8,%r14 addq %r13,%rdx vpxor %xmm10,%xmm8,%xmm8 xorq %r9,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm5,%xmm11 addq %rdx,%r11 addq %rdi,%rdx vpxor %xmm9,%xmm8,%xmm8 movq %r11,%r13 addq %rdx,%r14 vpsllq $3,%xmm5,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rdx vpaddq %xmm8,%xmm6,%xmm6 movq %rax,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm5,%xmm9 xorq %r11,%r13 xorq %rbx,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rdx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r11,%r12 xorq %r11,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 104(%rsp),%rcx movq %rdx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %rbx,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %r8,%rdi addq %r12,%rcx vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm6,%xmm6 xorq %rdx,%r14 addq %r13,%rcx vpaddq 64(%rbp),%xmm6,%xmm10 xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 vmovdqa %xmm10,96(%rsp) vpalignr $8,%xmm7,%xmm0,%xmm8 shrdq $23,%r13,%r13 movq %r14,%rcx vpalignr $8,%xmm3,%xmm4,%xmm11 movq %r11,%r12 shrdq $5,%r14,%r14 vpsrlq $1,%xmm8,%xmm10 xorq %r10,%r13 xorq %rax,%r12 vpaddq %xmm11,%xmm7,%xmm7 shrdq $4,%r13,%r13 xorq %rcx,%r14 vpsrlq $7,%xmm8,%xmm11 andq %r10,%r12 xorq %r10,%r13 vpsllq $56,%xmm8,%xmm9 addq 112(%rsp),%rbx movq %rcx,%r15 vpxor %xmm10,%xmm11,%xmm8 xorq %rax,%r12 shrdq $6,%r14,%r14 vpsrlq $7,%xmm10,%xmm10 xorq %rdx,%r15 addq %r12,%rbx vpxor %xmm9,%xmm8,%xmm8 shrdq $14,%r13,%r13 andq %r15,%rdi vpsllq $7,%xmm9,%xmm9 xorq %rcx,%r14 addq %r13,%rbx vpxor %xmm10,%xmm8,%xmm8 xorq %rdx,%rdi shrdq $28,%r14,%r14 vpsrlq $6,%xmm6,%xmm11 addq %rbx,%r9 addq %rdi,%rbx vpxor %xmm9,%xmm8,%xmm8 movq %r9,%r13 addq %rbx,%r14 vpsllq $3,%xmm6,%xmm10 shrdq $23,%r13,%r13 movq %r14,%rbx vpaddq %xmm8,%xmm7,%xmm7 movq %r10,%r12 shrdq $5,%r14,%r14 vpsrlq $19,%xmm6,%xmm9 xorq %r9,%r13 xorq %r11,%r12 vpxor %xmm10,%xmm11,%xmm11 shrdq $4,%r13,%r13 xorq %rbx,%r14 vpsllq $42,%xmm10,%xmm10 andq %r9,%r12 xorq %r9,%r13 vpxor %xmm9,%xmm11,%xmm11 addq 120(%rsp),%rax movq %rbx,%rdi vpsrlq $42,%xmm9,%xmm9 xorq %r11,%r12 shrdq $6,%r14,%r14 vpxor %xmm10,%xmm11,%xmm11 xorq %rcx,%rdi addq %r12,%rax vpxor %xmm9,%xmm11,%xmm11 shrdq $14,%r13,%r13 andq %rdi,%r15 vpaddq %xmm11,%xmm7,%xmm7 xorq %rbx,%r14 addq %r13,%rax vpaddq 96(%rbp),%xmm7,%xmm10 xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 vmovdqa %xmm10,112(%rsp) cmpb $0,135(%rbp) jne .Lavx_00_47 shrdq $23,%r13,%r13 movq %r14,%rax movq %r9,%r12 shrdq $5,%r14,%r14 xorq %r8,%r13 xorq %r10,%r12 shrdq $4,%r13,%r13 xorq %rax,%r14 andq %r8,%r12 xorq %r8,%r13 addq 0(%rsp),%r11 movq %rax,%r15 xorq %r10,%r12 shrdq $6,%r14,%r14 xorq %rbx,%r15 addq %r12,%r11 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rax,%r14 addq %r13,%r11 xorq %rbx,%rdi shrdq $28,%r14,%r14 addq %r11,%rdx addq %rdi,%r11 movq %rdx,%r13 addq %r11,%r14 shrdq $23,%r13,%r13 movq %r14,%r11 movq %r8,%r12 shrdq $5,%r14,%r14 xorq %rdx,%r13 xorq %r9,%r12 shrdq $4,%r13,%r13 xorq %r11,%r14 andq %rdx,%r12 xorq %rdx,%r13 addq 8(%rsp),%r10 movq %r11,%rdi xorq %r9,%r12 shrdq $6,%r14,%r14 xorq %rax,%rdi addq 
%r12,%r10 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r11,%r14 addq %r13,%r10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 shrdq $23,%r13,%r13 movq %r14,%r10 movq %rdx,%r12 shrdq $5,%r14,%r14 xorq %rcx,%r13 xorq %r8,%r12 shrdq $4,%r13,%r13 xorq %r10,%r14 andq %rcx,%r12 xorq %rcx,%r13 addq 16(%rsp),%r9 movq %r10,%r15 xorq %r8,%r12 shrdq $6,%r14,%r14 xorq %r11,%r15 addq %r12,%r9 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r10,%r14 addq %r13,%r9 xorq %r11,%rdi shrdq $28,%r14,%r14 addq %r9,%rbx addq %rdi,%r9 movq %rbx,%r13 addq %r9,%r14 shrdq $23,%r13,%r13 movq %r14,%r9 movq %rcx,%r12 shrdq $5,%r14,%r14 xorq %rbx,%r13 xorq %rdx,%r12 shrdq $4,%r13,%r13 xorq %r9,%r14 andq %rbx,%r12 xorq %rbx,%r13 addq 24(%rsp),%r8 movq %r9,%rdi xorq %rdx,%r12 shrdq $6,%r14,%r14 xorq %r10,%rdi addq %r12,%r8 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r9,%r14 addq %r13,%r8 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 shrdq $23,%r13,%r13 movq %r14,%r8 movq %rbx,%r12 shrdq $5,%r14,%r14 xorq %rax,%r13 xorq %rcx,%r12 shrdq $4,%r13,%r13 xorq %r8,%r14 andq %rax,%r12 xorq %rax,%r13 addq 32(%rsp),%rdx movq %r8,%r15 xorq %rcx,%r12 shrdq $6,%r14,%r14 xorq %r9,%r15 addq %r12,%rdx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r8,%r14 addq %r13,%rdx xorq %r9,%rdi shrdq $28,%r14,%r14 addq %rdx,%r11 addq %rdi,%rdx movq %r11,%r13 addq %rdx,%r14 shrdq $23,%r13,%r13 movq %r14,%rdx movq %rax,%r12 shrdq $5,%r14,%r14 xorq %r11,%r13 xorq %rbx,%r12 shrdq $4,%r13,%r13 xorq %rdx,%r14 andq %r11,%r12 xorq %r11,%r13 addq 40(%rsp),%rcx movq %rdx,%rdi xorq %rbx,%r12 shrdq $6,%r14,%r14 xorq %r8,%rdi addq %r12,%rcx shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rdx,%r14 addq %r13,%rcx xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 shrdq $23,%r13,%r13 movq %r14,%rcx movq %r11,%r12 shrdq $5,%r14,%r14 xorq %r10,%r13 xorq %rax,%r12 shrdq $4,%r13,%r13 xorq %rcx,%r14 andq %r10,%r12 xorq %r10,%r13 addq 48(%rsp),%rbx movq %rcx,%r15 xorq %rax,%r12 shrdq $6,%r14,%r14 xorq %rdx,%r15 addq %r12,%rbx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rcx,%r14 addq %r13,%rbx xorq %rdx,%rdi shrdq $28,%r14,%r14 addq %rbx,%r9 addq %rdi,%rbx movq %r9,%r13 addq %rbx,%r14 shrdq $23,%r13,%r13 movq %r14,%rbx movq %r10,%r12 shrdq $5,%r14,%r14 xorq %r9,%r13 xorq %r11,%r12 shrdq $4,%r13,%r13 xorq %rbx,%r14 andq %r9,%r12 xorq %r9,%r13 addq 56(%rsp),%rax movq %rbx,%rdi xorq %r11,%r12 shrdq $6,%r14,%r14 xorq %rcx,%rdi addq %r12,%rax shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rbx,%r14 addq %r13,%rax xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 shrdq $23,%r13,%r13 movq %r14,%rax movq %r9,%r12 shrdq $5,%r14,%r14 xorq %r8,%r13 xorq %r10,%r12 shrdq $4,%r13,%r13 xorq %rax,%r14 andq %r8,%r12 xorq %r8,%r13 addq 64(%rsp),%r11 movq %rax,%r15 xorq %r10,%r12 shrdq $6,%r14,%r14 xorq %rbx,%r15 addq %r12,%r11 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rax,%r14 addq %r13,%r11 xorq %rbx,%rdi shrdq $28,%r14,%r14 addq %r11,%rdx addq %rdi,%r11 movq %rdx,%r13 addq %r11,%r14 shrdq $23,%r13,%r13 movq %r14,%r11 movq %r8,%r12 shrdq $5,%r14,%r14 xorq %rdx,%r13 xorq %r9,%r12 shrdq $4,%r13,%r13 xorq %r11,%r14 andq %rdx,%r12 xorq %rdx,%r13 addq 72(%rsp),%r10 movq %r11,%rdi xorq %r9,%r12 shrdq $6,%r14,%r14 xorq %rax,%rdi addq %r12,%r10 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r11,%r14 addq %r13,%r10 xorq %rax,%r15 shrdq $28,%r14,%r14 addq %r10,%rcx addq %r15,%r10 movq %rcx,%r13 addq %r10,%r14 shrdq $23,%r13,%r13 movq %r14,%r10 movq %rdx,%r12 shrdq 
$5,%r14,%r14 xorq %rcx,%r13 xorq %r8,%r12 shrdq $4,%r13,%r13 xorq %r10,%r14 andq %rcx,%r12 xorq %rcx,%r13 addq 80(%rsp),%r9 movq %r10,%r15 xorq %r8,%r12 shrdq $6,%r14,%r14 xorq %r11,%r15 addq %r12,%r9 shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r10,%r14 addq %r13,%r9 xorq %r11,%rdi shrdq $28,%r14,%r14 addq %r9,%rbx addq %rdi,%r9 movq %rbx,%r13 addq %r9,%r14 shrdq $23,%r13,%r13 movq %r14,%r9 movq %rcx,%r12 shrdq $5,%r14,%r14 xorq %rbx,%r13 xorq %rdx,%r12 shrdq $4,%r13,%r13 xorq %r9,%r14 andq %rbx,%r12 xorq %rbx,%r13 addq 88(%rsp),%r8 movq %r9,%rdi xorq %rdx,%r12 shrdq $6,%r14,%r14 xorq %r10,%rdi addq %r12,%r8 shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %r9,%r14 addq %r13,%r8 xorq %r10,%r15 shrdq $28,%r14,%r14 addq %r8,%rax addq %r15,%r8 movq %rax,%r13 addq %r8,%r14 shrdq $23,%r13,%r13 movq %r14,%r8 movq %rbx,%r12 shrdq $5,%r14,%r14 xorq %rax,%r13 xorq %rcx,%r12 shrdq $4,%r13,%r13 xorq %r8,%r14 andq %rax,%r12 xorq %rax,%r13 addq 96(%rsp),%rdx movq %r8,%r15 xorq %rcx,%r12 shrdq $6,%r14,%r14 xorq %r9,%r15 addq %r12,%rdx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %r8,%r14 addq %r13,%rdx xorq %r9,%rdi shrdq $28,%r14,%r14 addq %rdx,%r11 addq %rdi,%rdx movq %r11,%r13 addq %rdx,%r14 shrdq $23,%r13,%r13 movq %r14,%rdx movq %rax,%r12 shrdq $5,%r14,%r14 xorq %r11,%r13 xorq %rbx,%r12 shrdq $4,%r13,%r13 xorq %rdx,%r14 andq %r11,%r12 xorq %r11,%r13 addq 104(%rsp),%rcx movq %rdx,%rdi xorq %rbx,%r12 shrdq $6,%r14,%r14 xorq %r8,%rdi addq %r12,%rcx shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rdx,%r14 addq %r13,%rcx xorq %r8,%r15 shrdq $28,%r14,%r14 addq %rcx,%r10 addq %r15,%rcx movq %r10,%r13 addq %rcx,%r14 shrdq $23,%r13,%r13 movq %r14,%rcx movq %r11,%r12 shrdq $5,%r14,%r14 xorq %r10,%r13 xorq %rax,%r12 shrdq $4,%r13,%r13 xorq %rcx,%r14 andq %r10,%r12 xorq %r10,%r13 addq 112(%rsp),%rbx movq %rcx,%r15 xorq %rax,%r12 shrdq $6,%r14,%r14 xorq %rdx,%r15 addq %r12,%rbx shrdq $14,%r13,%r13 andq %r15,%rdi xorq %rcx,%r14 addq %r13,%rbx xorq %rdx,%rdi shrdq $28,%r14,%r14 addq %rbx,%r9 addq %rdi,%rbx movq %r9,%r13 addq %rbx,%r14 shrdq $23,%r13,%r13 movq %r14,%rbx movq %r10,%r12 shrdq $5,%r14,%r14 xorq %r9,%r13 xorq %r11,%r12 shrdq $4,%r13,%r13 xorq %rbx,%r14 andq %r9,%r12 xorq %r9,%r13 addq 120(%rsp),%rax movq %rbx,%rdi xorq %r11,%r12 shrdq $6,%r14,%r14 xorq %rcx,%rdi addq %r12,%rax shrdq $14,%r13,%r13 andq %rdi,%r15 xorq %rbx,%r14 addq %r13,%rax xorq %rcx,%r15 shrdq $28,%r14,%r14 addq %rax,%r8 addq %r15,%rax movq %r8,%r13 addq %rax,%r14 movq 128+0(%rsp),%rdi movq %r14,%rax addq 0(%rdi),%rax leaq 128(%rsi),%rsi addq 8(%rdi),%rbx addq 16(%rdi),%rcx addq 24(%rdi),%rdx addq 32(%rdi),%r8 addq 40(%rdi),%r9 addq 48(%rdi),%r10 addq 56(%rdi),%r11 cmpq 128+16(%rsp),%rsi movq %rax,0(%rdi) movq %rbx,8(%rdi) movq %rcx,16(%rdi) movq %rdx,24(%rdi) movq %r8,32(%rdi) movq %r9,40(%rdi) movq %r10,48(%rdi) movq %r11,56(%rdi) jb .Lloop_avx movq 152(%rsp),%rsi .cfi_def_cfa %rsi,8 vzeroupper movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue_avx: .byte 0xf3,0xc3 .cfi_endproc .size sha512_block_data_order_avx,.-sha512_block_data_order_avx #endif
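The record ending above is AWS-LC's generated x86-64 SHA-512 assembly, which defines the scalar fallback sha512_block_data_order_nohw and the AVX variant sha512_block_data_order_avx. The System V register usage visible in the listing (%rdi holds the eight-word state, %rsi the input, and %rdx the block count, scaled by shlq $4 and folded into an end pointer of 128-byte steps) lets the C-level contract be sketched. This is a hedged sketch only: the prototypes are inferred from the register usage rather than copied from AWS-LC's internal headers, and the wrapper sha512_compress with its cpu_has_avx flag is hypothetical, not the library's own dispatcher.

/* Hedged sketch: prototypes inferred from the argument registers used in the
 * listing above; AWS-LC's internal headers are authoritative and may differ. */
#include <stddef.h>
#include <stdint.h>

extern void sha512_block_data_order_nohw(uint64_t state[8], const uint8_t *in,
                                         size_t num_blocks);
extern void sha512_block_data_order_avx(uint64_t state[8], const uint8_t *in,
                                        size_t num_blocks);

/* Hypothetical wrapper: compress num_blocks full 128-byte blocks into state.
 * The assembly loops do-while style, so a zero count is filtered out here,
 * and cpu_has_avx stands in for the caller's own CPU-capability check. */
static void sha512_compress(uint64_t state[8], const uint8_t *in,
                            size_t num_blocks, int cpu_has_avx) {
  if (num_blocks == 0) {
    return;
  }
  if (cpu_has_avx) {
    sha512_block_data_order_avx(state, in, num_blocks);
  } else {
    sha512_block_data_order_nohw(state, in, num_blocks);
  }
}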
marvin-hansen/iggy-streaming-system
18,842
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/rsaz-2k-avx512.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .text .globl rsaz_amm52x20_x1_ifma256 .hidden rsaz_amm52x20_x1_ifma256 .type rsaz_amm52x20_x1_ifma256,@function .align 32 rsaz_amm52x20_x1_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lrsaz_amm52x20_x1_ifma256_body: vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 xorl %r9d,%r9d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $5,%ebx .align 32 .Lloop5: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 8(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 16(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 
vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 24(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq %r8,%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 leaq 32(%r11),%r11 decl %ebx jne .Lloop5 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm16,%ymm1 vpsrlq $52,%ymm17,%ymm2 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm16,%ymm16 vpaddq %ymm2,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm16,%k2 vpcmpuq $6,.Lmask52x4(%rip),%ymm17,%k3 vpcmpuq $6,.Lmask52x4(%rip),%ymm18,%k4 vpcmpuq $6,.Lmask52x4(%rip),%ymm19,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm16,%k2 vpcmpuq $0,.Lmask52x4(%rip),%ymm17,%k3 vpcmpuq $0,.Lmask52x4(%rip),%ymm18,%k4 vpcmpuq $0,.Lmask52x4(%rip),%ymm19,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb 
%cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm16,%ymm16{%k2} vpsubq .Lmask52x4(%rip),%ymm17,%ymm17{%k3} vpsubq .Lmask52x4(%rip),%ymm18,%ymm18{%k4} vpsubq .Lmask52x4(%rip),%ymm19,%ymm19{%k5} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm16,32(%rdi) vmovdqu64 %ymm17,64(%rdi) vmovdqu64 %ymm18,96(%rdi) vmovdqu64 %ymm19,128(%rdi) vzeroupper movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbp .cfi_restore %rbp movq 40(%rsp),%rbx .cfi_restore %rbx leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lrsaz_amm52x20_x1_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x20_x1_ifma256, .-rsaz_amm52x20_x1_ifma256 .section .rodata .align 32 .Lmask52x4: .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .quad 0xfffffffffffff .text .text .globl rsaz_amm52x20_x2_ifma256 .hidden rsaz_amm52x20_x2_ifma256 .type rsaz_amm52x20_x2_ifma256,@function .align 32 rsaz_amm52x20_x2_ifma256: .cfi_startproc .byte 243,15,30,250 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lrsaz_amm52x20_x2_ifma256_body: vpxord %ymm0,%ymm0,%ymm0 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm20 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm22 vmovdqa64 %ymm0,%ymm23 xorl %r9d,%r9d xorl %r15d,%r15d movq %rdx,%r11 movq $0xfffffffffffff,%rax movl $20,%ebx .align 32 .Lloop20: movq 0(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 0(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 movq %r12,%r10 adcq $0,%r10 movq (%r8),%r13 imulq %r9,%r13 andq %rax,%r13 vpbroadcastq %r13,%ymm2 movq 0(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r9 adcq %r12,%r10 shrq $52,%r9 salq $12,%r10 orq %r10,%r9 vpmadd52luq 0(%rsi),%ymm1,%ymm3 vpmadd52luq 32(%rsi),%ymm1,%ymm16 vpmadd52luq 64(%rsi),%ymm1,%ymm17 vpmadd52luq 96(%rsi),%ymm1,%ymm18 vpmadd52luq 128(%rsi),%ymm1,%ymm19 vpmadd52luq 0(%rcx),%ymm2,%ymm3 vpmadd52luq 32(%rcx),%ymm2,%ymm16 vpmadd52luq 64(%rcx),%ymm2,%ymm17 vpmadd52luq 96(%rcx),%ymm2,%ymm18 vpmadd52luq 128(%rcx),%ymm2,%ymm19 valignq $1,%ymm3,%ymm16,%ymm3 valignq $1,%ymm16,%ymm17,%ymm16 valignq $1,%ymm17,%ymm18,%ymm17 valignq $1,%ymm18,%ymm19,%ymm18 valignq $1,%ymm19,%ymm0,%ymm19 vmovq %xmm3,%r13 addq %r13,%r9 vpmadd52huq 0(%rsi),%ymm1,%ymm3 vpmadd52huq 32(%rsi),%ymm1,%ymm16 vpmadd52huq 64(%rsi),%ymm1,%ymm17 vpmadd52huq 96(%rsi),%ymm1,%ymm18 vpmadd52huq 128(%rsi),%ymm1,%ymm19 vpmadd52huq 0(%rcx),%ymm2,%ymm3 vpmadd52huq 32(%rcx),%ymm2,%ymm16 vpmadd52huq 64(%rcx),%ymm2,%ymm17 vpmadd52huq 96(%rcx),%ymm2,%ymm18 vpmadd52huq 128(%rcx),%ymm2,%ymm19 movq 160(%r11),%r13 vpbroadcastq %r13,%ymm1 movq 160(%rsi),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 movq %r12,%r10 adcq $0,%r10 movq 8(%r8),%r13 imulq %r15,%r13 andq %rax,%r13 vpbroadcastq 
%r13,%ymm2 movq 160(%rcx),%rdx mulxq %r13,%r13,%r12 addq %r13,%r15 adcq %r12,%r10 shrq $52,%r15 salq $12,%r10 orq %r10,%r15 vpmadd52luq 160(%rsi),%ymm1,%ymm4 vpmadd52luq 192(%rsi),%ymm1,%ymm20 vpmadd52luq 224(%rsi),%ymm1,%ymm21 vpmadd52luq 256(%rsi),%ymm1,%ymm22 vpmadd52luq 288(%rsi),%ymm1,%ymm23 vpmadd52luq 160(%rcx),%ymm2,%ymm4 vpmadd52luq 192(%rcx),%ymm2,%ymm20 vpmadd52luq 224(%rcx),%ymm2,%ymm21 vpmadd52luq 256(%rcx),%ymm2,%ymm22 vpmadd52luq 288(%rcx),%ymm2,%ymm23 valignq $1,%ymm4,%ymm20,%ymm4 valignq $1,%ymm20,%ymm21,%ymm20 valignq $1,%ymm21,%ymm22,%ymm21 valignq $1,%ymm22,%ymm23,%ymm22 valignq $1,%ymm23,%ymm0,%ymm23 vmovq %xmm4,%r13 addq %r13,%r15 vpmadd52huq 160(%rsi),%ymm1,%ymm4 vpmadd52huq 192(%rsi),%ymm1,%ymm20 vpmadd52huq 224(%rsi),%ymm1,%ymm21 vpmadd52huq 256(%rsi),%ymm1,%ymm22 vpmadd52huq 288(%rsi),%ymm1,%ymm23 vpmadd52huq 160(%rcx),%ymm2,%ymm4 vpmadd52huq 192(%rcx),%ymm2,%ymm20 vpmadd52huq 224(%rcx),%ymm2,%ymm21 vpmadd52huq 256(%rcx),%ymm2,%ymm22 vpmadd52huq 288(%rcx),%ymm2,%ymm23 leaq 8(%r11),%r11 decl %ebx jne .Lloop20 vpbroadcastq %r9,%ymm0 vpblendd $3,%ymm0,%ymm3,%ymm3 vpsrlq $52,%ymm3,%ymm0 vpsrlq $52,%ymm16,%ymm1 vpsrlq $52,%ymm17,%ymm2 vpsrlq $52,%ymm18,%ymm25 vpsrlq $52,%ymm19,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 vpaddq %ymm0,%ymm3,%ymm3 vpaddq %ymm1,%ymm16,%ymm16 vpaddq %ymm2,%ymm17,%ymm17 vpaddq %ymm25,%ymm18,%ymm18 vpaddq %ymm26,%ymm19,%ymm19 vpcmpuq $6,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm16,%k2 vpcmpuq $6,.Lmask52x4(%rip),%ymm17,%k3 vpcmpuq $6,.Lmask52x4(%rip),%ymm18,%k4 vpcmpuq $6,.Lmask52x4(%rip),%ymm19,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,.Lmask52x4(%rip),%ymm3,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm16,%k2 vpcmpuq $0,.Lmask52x4(%rip),%ymm17,%k3 vpcmpuq $0,.Lmask52x4(%rip),%ymm18,%k4 vpcmpuq $0,.Lmask52x4(%rip),%ymm19,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb %cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq .Lmask52x4(%rip),%ymm3,%ymm3{%k1} vpsubq .Lmask52x4(%rip),%ymm16,%ymm16{%k2} vpsubq .Lmask52x4(%rip),%ymm17,%ymm17{%k3} vpsubq .Lmask52x4(%rip),%ymm18,%ymm18{%k4} vpsubq .Lmask52x4(%rip),%ymm19,%ymm19{%k5} vpandq .Lmask52x4(%rip),%ymm3,%ymm3 vpandq .Lmask52x4(%rip),%ymm16,%ymm16 vpandq .Lmask52x4(%rip),%ymm17,%ymm17 vpandq .Lmask52x4(%rip),%ymm18,%ymm18 vpandq .Lmask52x4(%rip),%ymm19,%ymm19 vpbroadcastq %r15,%ymm0 vpblendd $3,%ymm0,%ymm4,%ymm4 vpsrlq $52,%ymm4,%ymm0 vpsrlq $52,%ymm20,%ymm1 vpsrlq $52,%ymm21,%ymm2 vpsrlq $52,%ymm22,%ymm25 vpsrlq $52,%ymm23,%ymm26 valignq $3,%ymm25,%ymm26,%ymm26 valignq $3,%ymm2,%ymm25,%ymm25 valignq $3,%ymm1,%ymm2,%ymm2 valignq $3,%ymm0,%ymm1,%ymm1 valignq $3,.Lzeros(%rip),%ymm0,%ymm0 vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm20,%ymm20 vpandq .Lmask52x4(%rip),%ymm21,%ymm21 vpandq .Lmask52x4(%rip),%ymm22,%ymm22 vpandq .Lmask52x4(%rip),%ymm23,%ymm23 vpaddq 
%ymm0,%ymm4,%ymm4 vpaddq %ymm1,%ymm20,%ymm20 vpaddq %ymm2,%ymm21,%ymm21 vpaddq %ymm25,%ymm22,%ymm22 vpaddq %ymm26,%ymm23,%ymm23 vpcmpuq $6,.Lmask52x4(%rip),%ymm4,%k1 vpcmpuq $6,.Lmask52x4(%rip),%ymm20,%k2 vpcmpuq $6,.Lmask52x4(%rip),%ymm21,%k3 vpcmpuq $6,.Lmask52x4(%rip),%ymm22,%k4 vpcmpuq $6,.Lmask52x4(%rip),%ymm23,%k5 kmovb %k1,%r14d kmovb %k2,%r13d kmovb %k3,%r12d kmovb %k4,%r11d kmovb %k5,%r10d vpcmpuq $0,.Lmask52x4(%rip),%ymm4,%k1 vpcmpuq $0,.Lmask52x4(%rip),%ymm20,%k2 vpcmpuq $0,.Lmask52x4(%rip),%ymm21,%k3 vpcmpuq $0,.Lmask52x4(%rip),%ymm22,%k4 vpcmpuq $0,.Lmask52x4(%rip),%ymm23,%k5 kmovb %k1,%r9d kmovb %k2,%r8d kmovb %k3,%ebx kmovb %k4,%ecx kmovb %k5,%edx shlb $4,%r13b orb %r13b,%r14b shlb $4,%r11b orb %r11b,%r12b addb %r14b,%r14b adcb %r12b,%r12b adcb %r10b,%r10b shlb $4,%r8b orb %r8b,%r9b shlb $4,%cl orb %cl,%bl addb %r9b,%r14b adcb %bl,%r12b adcb %dl,%r10b xorb %r9b,%r14b xorb %bl,%r12b xorb %dl,%r10b kmovb %r14d,%k1 shrb $4,%r14b kmovb %r14d,%k2 kmovb %r12d,%k3 shrb $4,%r12b kmovb %r12d,%k4 kmovb %r10d,%k5 vpsubq .Lmask52x4(%rip),%ymm4,%ymm4{%k1} vpsubq .Lmask52x4(%rip),%ymm20,%ymm20{%k2} vpsubq .Lmask52x4(%rip),%ymm21,%ymm21{%k3} vpsubq .Lmask52x4(%rip),%ymm22,%ymm22{%k4} vpsubq .Lmask52x4(%rip),%ymm23,%ymm23{%k5} vpandq .Lmask52x4(%rip),%ymm4,%ymm4 vpandq .Lmask52x4(%rip),%ymm20,%ymm20 vpandq .Lmask52x4(%rip),%ymm21,%ymm21 vpandq .Lmask52x4(%rip),%ymm22,%ymm22 vpandq .Lmask52x4(%rip),%ymm23,%ymm23 vmovdqu64 %ymm3,0(%rdi) vmovdqu64 %ymm16,32(%rdi) vmovdqu64 %ymm17,64(%rdi) vmovdqu64 %ymm18,96(%rdi) vmovdqu64 %ymm19,128(%rdi) vmovdqu64 %ymm4,160(%rdi) vmovdqu64 %ymm20,192(%rdi) vmovdqu64 %ymm21,224(%rdi) vmovdqu64 %ymm22,256(%rdi) vmovdqu64 %ymm23,288(%rdi) vzeroupper movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbp .cfi_restore %rbp movq 40(%rsp),%rbx .cfi_restore %rbx leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lrsaz_amm52x20_x2_ifma256_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size rsaz_amm52x20_x2_ifma256, .-rsaz_amm52x20_x2_ifma256 .text .align 32 .globl extract_multiplier_2x20_win5 .hidden extract_multiplier_2x20_win5 .type extract_multiplier_2x20_win5,@function extract_multiplier_2x20_win5: .cfi_startproc .byte 243,15,30,250 vmovdqa64 .Lones(%rip),%ymm24 vpbroadcastq %rdx,%ymm22 vpbroadcastq %rcx,%ymm23 leaq 10240(%rsi),%rax vpxor %xmm0,%xmm0,%xmm0 vmovdqa64 %ymm0,%ymm21 vmovdqa64 %ymm0,%ymm1 vmovdqa64 %ymm0,%ymm2 vmovdqa64 %ymm0,%ymm3 vmovdqa64 %ymm0,%ymm4 vmovdqa64 %ymm0,%ymm5 vmovdqa64 %ymm0,%ymm16 vmovdqa64 %ymm0,%ymm17 vmovdqa64 %ymm0,%ymm18 vmovdqa64 %ymm0,%ymm19 .align 32 .Lloop: vpcmpq $0,%ymm21,%ymm22,%k1 vpcmpq $0,%ymm21,%ymm23,%k2 vmovdqu64 0(%rsi),%ymm20 vpblendmq %ymm20,%ymm0,%ymm0{%k1} vmovdqu64 32(%rsi),%ymm20 vpblendmq %ymm20,%ymm1,%ymm1{%k1} vmovdqu64 64(%rsi),%ymm20 vpblendmq %ymm20,%ymm2,%ymm2{%k1} vmovdqu64 96(%rsi),%ymm20 vpblendmq %ymm20,%ymm3,%ymm3{%k1} vmovdqu64 128(%rsi),%ymm20 vpblendmq %ymm20,%ymm4,%ymm4{%k1} vmovdqu64 160(%rsi),%ymm20 vpblendmq %ymm20,%ymm5,%ymm5{%k2} vmovdqu64 192(%rsi),%ymm20 vpblendmq %ymm20,%ymm16,%ymm16{%k2} vmovdqu64 224(%rsi),%ymm20 vpblendmq %ymm20,%ymm17,%ymm17{%k2} vmovdqu64 256(%rsi),%ymm20 vpblendmq %ymm20,%ymm18,%ymm18{%k2} vmovdqu64 288(%rsi),%ymm20 vpblendmq %ymm20,%ymm19,%ymm19{%k2} vpaddq %ymm24,%ymm21,%ymm21 addq $320,%rsi cmpq %rsi,%rax jne .Lloop vmovdqu64 %ymm0,0(%rdi) vmovdqu64 %ymm1,32(%rdi) vmovdqu64 %ymm2,64(%rdi) vmovdqu64 %ymm3,96(%rdi) vmovdqu64 %ymm4,128(%rdi) vmovdqu64 
%ymm5,160(%rdi) vmovdqu64 %ymm16,192(%rdi) vmovdqu64 %ymm17,224(%rdi) vmovdqu64 %ymm18,256(%rdi) vmovdqu64 %ymm19,288(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size extract_multiplier_2x20_win5, .-extract_multiplier_2x20_win5 .section .rodata .align 32 .Lones: .quad 1,1,1,1 .Lzeros: .quad 0,0,0,0 .text #endif #endif
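The rsaz-2k-avx512.S record above carries AWS-LC's AVX-512 IFMA code for 2048-bit RSA: rsaz_amm52x20_x1_ifma256 performs an almost Montgomery multiplication over 20 limbs of 52 bits each (20 x 52 = 1040 bits), rsaz_amm52x20_x2_ifma256 runs two such multiplications back to back (note the separate 0(%r8) and 8(%r8) Montgomery factors), and extract_multiplier_2x20_win5 does a constant-time selection from a 32-entry window-5 table (the loop walks 32 strides of 320 bytes). A hedged sketch of the entry points follows; the prototypes are inferred from the register usage in the listing rather than taken from a header, and the array-length annotations and index parameter types are assumptions.

/* Hedged sketch: signatures inferred from the SysV registers used above
 * (%rdi, %rsi, %rdx, %rcx, %r8); AWS-LC's RSA sources are authoritative.
 * Operands are radix-2^52 limbs, 20 per 1040-bit value. */
#include <stdint.h>

/* Almost Montgomery multiplication: res congruent to a*b*2^(-1040) mod m,
 * normalized to 52-bit limbs but not necessarily fully reduced below m.
 * k0 is the Montgomery constant -m^-1; the listing masks it to 52 bits. */
extern void rsaz_amm52x20_x1_ifma256(uint64_t res[20], const uint64_t a[20],
                                     const uint64_t b[20],
                                     const uint64_t m[20], uint64_t k0);

/* Two independent 20-limb multiplications on concatenated operands; k0[0]
 * and k0[1] correspond to the 0(%r8) and 8(%r8) loads in the listing. */
extern void rsaz_amm52x20_x2_ifma256(uint64_t out[40], const uint64_t a[40],
                                     const uint64_t b[40],
                                     const uint64_t m[40],
                                     const uint64_t k0[2]);

/* Constant-time selection of entry idx1 (first 20 limbs) and idx2 (second
 * 20 limbs) from a table of 32 interleaved 2x20-limb multipliers. */
extern void extract_multiplier_2x20_win5(uint64_t out[40],
                                         const uint64_t table[32 * 2 * 20],
                                         uint64_t idx1, uint64_t idx2);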
marvin-hansen/iggy-streaming-system
19,029
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/vpaes-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .type _vpaes_encrypt_core,@function .align 16 _vpaes_encrypt_core: .cfi_startproc movq %rdx,%r9 movq $16,%r11 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa .Lk_ipt(%rip),%xmm2 pandn %xmm0,%xmm1 movdqu (%r9),%xmm5 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa .Lk_ipt+16(%rip),%xmm0 .byte 102,15,56,0,193 pxor %xmm5,%xmm2 addq $16,%r9 pxor %xmm2,%xmm0 leaq .Lk_mc_backward(%rip),%r10 jmp .Lenc_entry .align 16 .Lenc_loop: movdqa %xmm13,%xmm4 movdqa %xmm12,%xmm0 .byte 102,15,56,0,226 .byte 102,15,56,0,195 pxor %xmm5,%xmm4 movdqa %xmm15,%xmm5 pxor %xmm4,%xmm0 movdqa -64(%r11,%r10,1),%xmm1 .byte 102,15,56,0,234 movdqa (%r11,%r10,1),%xmm4 movdqa %xmm14,%xmm2 .byte 102,15,56,0,211 movdqa %xmm0,%xmm3 pxor %xmm5,%xmm2 .byte 102,15,56,0,193 addq $16,%r9 pxor %xmm2,%xmm0 .byte 102,15,56,0,220 addq $16,%r11 pxor %xmm0,%xmm3 .byte 102,15,56,0,193 andq $0x30,%r11 subq $1,%rax pxor %xmm3,%xmm0 .Lenc_entry: movdqa %xmm9,%xmm1 movdqa %xmm11,%xmm5 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,232 movdqa %xmm10,%xmm3 pxor %xmm1,%xmm0 .byte 102,15,56,0,217 movdqa %xmm10,%xmm4 pxor %xmm5,%xmm3 .byte 102,15,56,0,224 movdqa %xmm10,%xmm2 pxor %xmm5,%xmm4 .byte 102,15,56,0,211 movdqa %xmm10,%xmm3 pxor %xmm0,%xmm2 .byte 102,15,56,0,220 movdqu (%r9),%xmm5 pxor %xmm1,%xmm3 jnz .Lenc_loop movdqa -96(%r10),%xmm4 movdqa -80(%r10),%xmm0 .byte 102,15,56,0,226 pxor %xmm5,%xmm4 .byte 102,15,56,0,195 movdqa 64(%r11,%r10,1),%xmm1 pxor %xmm4,%xmm0 .byte 102,15,56,0,193 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_encrypt_core,.-_vpaes_encrypt_core .type _vpaes_encrypt_core_2x,@function .align 16 _vpaes_encrypt_core_2x: .cfi_startproc movq %rdx,%r9 movq $16,%r11 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa %xmm9,%xmm7 movdqa .Lk_ipt(%rip),%xmm2 movdqa %xmm2,%xmm8 pandn %xmm0,%xmm1 pandn %xmm6,%xmm7 movdqu (%r9),%xmm5 psrld $4,%xmm1 psrld $4,%xmm7 pand %xmm9,%xmm0 pand %xmm9,%xmm6 .byte 102,15,56,0,208 .byte 102,68,15,56,0,198 movdqa .Lk_ipt+16(%rip),%xmm0 movdqa %xmm0,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,247 pxor %xmm5,%xmm2 pxor %xmm5,%xmm8 addq $16,%r9 pxor %xmm2,%xmm0 pxor %xmm8,%xmm6 leaq .Lk_mc_backward(%rip),%r10 jmp .Lenc2x_entry .align 16 .Lenc2x_loop: movdqa .Lk_sb1(%rip),%xmm4 movdqa .Lk_sb1+16(%rip),%xmm0 movdqa %xmm4,%xmm12 movdqa %xmm0,%xmm6 .byte 102,15,56,0,226 .byte 102,69,15,56,0,224 .byte 102,15,56,0,195 .byte 102,65,15,56,0,243 pxor %xmm5,%xmm4 pxor %xmm5,%xmm12 movdqa .Lk_sb2(%rip),%xmm5 movdqa %xmm5,%xmm13 pxor %xmm4,%xmm0 pxor %xmm12,%xmm6 movdqa -64(%r11,%r10,1),%xmm1 .byte 102,15,56,0,234 .byte 102,69,15,56,0,232 movdqa (%r11,%r10,1),%xmm4 movdqa .Lk_sb2+16(%rip),%xmm2 movdqa %xmm2,%xmm8 .byte 102,15,56,0,211 .byte 102,69,15,56,0,195 movdqa %xmm0,%xmm3 movdqa %xmm6,%xmm11 pxor %xmm5,%xmm2 pxor %xmm13,%xmm8 .byte 102,15,56,0,193 .byte 102,15,56,0,241 addq $16,%r9 pxor %xmm2,%xmm0 pxor %xmm8,%xmm6 .byte 102,15,56,0,220 .byte 102,68,15,56,0,220 addq $16,%r11 pxor %xmm0,%xmm3 pxor %xmm6,%xmm11 .byte 102,15,56,0,193 .byte 102,15,56,0,241 andq $0x30,%r11 subq $1,%rax pxor %xmm3,%xmm0 pxor %xmm11,%xmm6 .Lenc2x_entry: movdqa %xmm9,%xmm1 movdqa %xmm9,%xmm7 movdqa .Lk_inv+16(%rip),%xmm5 movdqa %xmm5,%xmm13 pandn %xmm0,%xmm1 pandn %xmm6,%xmm7 psrld $4,%xmm1 psrld $4,%xmm7 pand %xmm9,%xmm0 pand %xmm9,%xmm6 .byte 102,15,56,0,232 .byte 102,68,15,56,0,238 
movdqa %xmm10,%xmm3 movdqa %xmm10,%xmm11 pxor %xmm1,%xmm0 pxor %xmm7,%xmm6 .byte 102,15,56,0,217 .byte 102,68,15,56,0,223 movdqa %xmm10,%xmm4 movdqa %xmm10,%xmm12 pxor %xmm5,%xmm3 pxor %xmm13,%xmm11 .byte 102,15,56,0,224 .byte 102,68,15,56,0,230 movdqa %xmm10,%xmm2 movdqa %xmm10,%xmm8 pxor %xmm5,%xmm4 pxor %xmm13,%xmm12 .byte 102,15,56,0,211 .byte 102,69,15,56,0,195 movdqa %xmm10,%xmm3 movdqa %xmm10,%xmm11 pxor %xmm0,%xmm2 pxor %xmm6,%xmm8 .byte 102,15,56,0,220 .byte 102,69,15,56,0,220 movdqu (%r9),%xmm5 pxor %xmm1,%xmm3 pxor %xmm7,%xmm11 jnz .Lenc2x_loop movdqa -96(%r10),%xmm4 movdqa -80(%r10),%xmm0 movdqa %xmm4,%xmm12 movdqa %xmm0,%xmm6 .byte 102,15,56,0,226 .byte 102,69,15,56,0,224 pxor %xmm5,%xmm4 pxor %xmm5,%xmm12 .byte 102,15,56,0,195 .byte 102,65,15,56,0,243 movdqa 64(%r11,%r10,1),%xmm1 pxor %xmm4,%xmm0 pxor %xmm12,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,241 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_encrypt_core_2x,.-_vpaes_encrypt_core_2x .type _vpaes_decrypt_core,@function .align 16 _vpaes_decrypt_core: .cfi_startproc movq %rdx,%r9 movl 240(%rdx),%eax movdqa %xmm9,%xmm1 movdqa .Lk_dipt(%rip),%xmm2 pandn %xmm0,%xmm1 movq %rax,%r11 psrld $4,%xmm1 movdqu (%r9),%xmm5 shlq $4,%r11 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa .Lk_dipt+16(%rip),%xmm0 xorq $0x30,%r11 leaq .Lk_dsbd(%rip),%r10 .byte 102,15,56,0,193 andq $0x30,%r11 pxor %xmm5,%xmm2 movdqa .Lk_mc_forward+48(%rip),%xmm5 pxor %xmm2,%xmm0 addq $16,%r9 addq %r10,%r11 jmp .Ldec_entry .align 16 .Ldec_loop: movdqa -32(%r10),%xmm4 movdqa -16(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 0(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 16(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 32(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 48(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 movdqa 64(%r10),%xmm4 pxor %xmm1,%xmm0 movdqa 80(%r10),%xmm1 .byte 102,15,56,0,226 .byte 102,15,56,0,197 .byte 102,15,56,0,203 pxor %xmm4,%xmm0 addq $16,%r9 .byte 102,15,58,15,237,12 pxor %xmm1,%xmm0 subq $1,%rax .Ldec_entry: movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 movdqa %xmm11,%xmm2 psrld $4,%xmm1 pand %xmm9,%xmm0 .byte 102,15,56,0,208 movdqa %xmm10,%xmm3 pxor %xmm1,%xmm0 .byte 102,15,56,0,217 movdqa %xmm10,%xmm4 pxor %xmm2,%xmm3 .byte 102,15,56,0,224 pxor %xmm2,%xmm4 movdqa %xmm10,%xmm2 .byte 102,15,56,0,211 movdqa %xmm10,%xmm3 pxor %xmm0,%xmm2 .byte 102,15,56,0,220 movdqu (%r9),%xmm0 pxor %xmm1,%xmm3 jnz .Ldec_loop movdqa 96(%r10),%xmm4 .byte 102,15,56,0,226 pxor %xmm0,%xmm4 movdqa 112(%r10),%xmm0 movdqa -352(%r11),%xmm2 .byte 102,15,56,0,195 pxor %xmm4,%xmm0 .byte 102,15,56,0,194 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_decrypt_core,.-_vpaes_decrypt_core .type _vpaes_schedule_core,@function .align 16 _vpaes_schedule_core: .cfi_startproc call _vpaes_preheat movdqa .Lk_rcon(%rip),%xmm8 movdqu (%rdi),%xmm0 movdqa %xmm0,%xmm3 leaq .Lk_ipt(%rip),%r11 call _vpaes_schedule_transform movdqa %xmm0,%xmm7 leaq .Lk_sr(%rip),%r10 testq %rcx,%rcx jnz .Lschedule_am_decrypting movdqu %xmm0,(%rdx) jmp .Lschedule_go .Lschedule_am_decrypting: movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,217 movdqu %xmm3,(%rdx) xorq $0x30,%r8 .Lschedule_go: cmpl $192,%esi ja .Lschedule_256 je .Lschedule_192 .Lschedule_128: movl $10,%esi .Loop_schedule_128: call _vpaes_schedule_round decq %rsi jz .Lschedule_mangle_last call _vpaes_schedule_mangle jmp .Loop_schedule_128 .align 16 .Lschedule_192: movdqu 8(%rdi),%xmm0 call _vpaes_schedule_transform movdqa %xmm0,%xmm6 pxor 
%xmm4,%xmm4 movhlps %xmm4,%xmm6 movl $4,%esi .Loop_schedule_192: call _vpaes_schedule_round .byte 102,15,58,15,198,8 call _vpaes_schedule_mangle call _vpaes_schedule_192_smear call _vpaes_schedule_mangle call _vpaes_schedule_round decq %rsi jz .Lschedule_mangle_last call _vpaes_schedule_mangle call _vpaes_schedule_192_smear jmp .Loop_schedule_192 .align 16 .Lschedule_256: movdqu 16(%rdi),%xmm0 call _vpaes_schedule_transform movl $7,%esi .Loop_schedule_256: call _vpaes_schedule_mangle movdqa %xmm0,%xmm6 call _vpaes_schedule_round decq %rsi jz .Lschedule_mangle_last call _vpaes_schedule_mangle pshufd $0xFF,%xmm0,%xmm0 movdqa %xmm7,%xmm5 movdqa %xmm6,%xmm7 call _vpaes_schedule_low_round movdqa %xmm5,%xmm7 jmp .Loop_schedule_256 .align 16 .Lschedule_mangle_last: leaq .Lk_deskew(%rip),%r11 testq %rcx,%rcx jnz .Lschedule_mangle_last_dec movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,193 leaq .Lk_opt(%rip),%r11 addq $32,%rdx .Lschedule_mangle_last_dec: addq $-16,%rdx pxor .Lk_s63(%rip),%xmm0 call _vpaes_schedule_transform movdqu %xmm0,(%rdx) pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_schedule_core,.-_vpaes_schedule_core .type _vpaes_schedule_192_smear,@function .align 16 _vpaes_schedule_192_smear: .cfi_startproc pshufd $0x80,%xmm6,%xmm1 pshufd $0xFE,%xmm7,%xmm0 pxor %xmm1,%xmm6 pxor %xmm1,%xmm1 pxor %xmm0,%xmm6 movdqa %xmm6,%xmm0 movhlps %xmm1,%xmm6 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear .type _vpaes_schedule_round,@function .align 16 _vpaes_schedule_round: .cfi_startproc pxor %xmm1,%xmm1 .byte 102,65,15,58,15,200,15 .byte 102,69,15,58,15,192,15 pxor %xmm1,%xmm7 pshufd $0xFF,%xmm0,%xmm0 .byte 102,15,58,15,192,1 _vpaes_schedule_low_round: movdqa %xmm7,%xmm1 pslldq $4,%xmm7 pxor %xmm1,%xmm7 movdqa %xmm7,%xmm1 pslldq $8,%xmm7 pxor %xmm1,%xmm7 pxor .Lk_s63(%rip),%xmm7 movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 movdqa %xmm11,%xmm2 .byte 102,15,56,0,208 pxor %xmm1,%xmm0 movdqa %xmm10,%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 movdqa %xmm10,%xmm4 .byte 102,15,56,0,224 pxor %xmm2,%xmm4 movdqa %xmm10,%xmm2 .byte 102,15,56,0,211 pxor %xmm0,%xmm2 movdqa %xmm10,%xmm3 .byte 102,15,56,0,220 pxor %xmm1,%xmm3 movdqa %xmm13,%xmm4 .byte 102,15,56,0,226 movdqa %xmm12,%xmm0 .byte 102,15,56,0,195 pxor %xmm4,%xmm0 pxor %xmm7,%xmm0 movdqa %xmm0,%xmm7 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_schedule_round,.-_vpaes_schedule_round .type _vpaes_schedule_transform,@function .align 16 _vpaes_schedule_transform: .cfi_startproc movdqa %xmm9,%xmm1 pandn %xmm0,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm0 movdqa (%r11),%xmm2 .byte 102,15,56,0,208 movdqa 16(%r11),%xmm0 .byte 102,15,56,0,193 pxor %xmm2,%xmm0 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_schedule_transform,.-_vpaes_schedule_transform .type _vpaes_schedule_mangle,@function .align 16 _vpaes_schedule_mangle: .cfi_startproc movdqa %xmm0,%xmm4 movdqa .Lk_mc_forward(%rip),%xmm5 testq %rcx,%rcx jnz .Lschedule_mangle_dec addq $16,%rdx pxor .Lk_s63(%rip),%xmm4 .byte 102,15,56,0,229 movdqa %xmm4,%xmm3 .byte 102,15,56,0,229 pxor %xmm4,%xmm3 .byte 102,15,56,0,229 pxor %xmm4,%xmm3 jmp .Lschedule_mangle_both .align 16 .Lschedule_mangle_dec: leaq .Lk_dksd(%rip),%r11 movdqa %xmm9,%xmm1 pandn %xmm4,%xmm1 psrld $4,%xmm1 pand %xmm9,%xmm4 movdqa 0(%r11),%xmm2 .byte 102,15,56,0,212 movdqa 16(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 32(%r11),%xmm2 
.byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 48(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 64(%r11),%xmm2 .byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 80(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 .byte 102,15,56,0,221 movdqa 96(%r11),%xmm2 .byte 102,15,56,0,212 pxor %xmm3,%xmm2 movdqa 112(%r11),%xmm3 .byte 102,15,56,0,217 pxor %xmm2,%xmm3 addq $-16,%rdx .Lschedule_mangle_both: movdqa (%r8,%r10,1),%xmm1 .byte 102,15,56,0,217 addq $-16,%r8 andq $0x30,%r8 movdqu %xmm3,(%rdx) .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle .globl vpaes_set_encrypt_key .hidden vpaes_set_encrypt_key .type vpaes_set_encrypt_key,@function .align 16 vpaes_set_encrypt_key: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+5(%rip) #endif movl %esi,%eax shrl $5,%eax addl $5,%eax movl %eax,240(%rdx) movl $0,%ecx movl $0x30,%r8d call _vpaes_schedule_core xorl %eax,%eax .byte 0xf3,0xc3 .cfi_endproc .size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key .globl vpaes_set_decrypt_key .hidden vpaes_set_decrypt_key .type vpaes_set_decrypt_key,@function .align 16 vpaes_set_decrypt_key: .cfi_startproc _CET_ENDBR movl %esi,%eax shrl $5,%eax addl $5,%eax movl %eax,240(%rdx) shll $4,%eax leaq 16(%rdx,%rax,1),%rdx movl $1,%ecx movl %esi,%r8d shrl $1,%r8d andl $32,%r8d xorl $32,%r8d call _vpaes_schedule_core xorl %eax,%eax .byte 0xf3,0xc3 .cfi_endproc .size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key .globl vpaes_encrypt .hidden vpaes_encrypt .type vpaes_encrypt,@function .align 16 vpaes_encrypt: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+4(%rip) #endif movdqu (%rdi),%xmm0 call _vpaes_preheat call _vpaes_encrypt_core movdqu %xmm0,(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size vpaes_encrypt,.-vpaes_encrypt .globl vpaes_decrypt .hidden vpaes_decrypt .type vpaes_decrypt,@function .align 16 vpaes_decrypt: .cfi_startproc _CET_ENDBR movdqu (%rdi),%xmm0 call _vpaes_preheat call _vpaes_decrypt_core movdqu %xmm0,(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size vpaes_decrypt,.-vpaes_decrypt .globl vpaes_cbc_encrypt .hidden vpaes_cbc_encrypt .type vpaes_cbc_encrypt,@function .align 16 vpaes_cbc_encrypt: .cfi_startproc _CET_ENDBR xchgq %rcx,%rdx subq $16,%rcx jc .Lcbc_abort movdqu (%r8),%xmm6 subq %rdi,%rsi call _vpaes_preheat cmpl $0,%r9d je .Lcbc_dec_loop jmp .Lcbc_enc_loop .align 16 .Lcbc_enc_loop: movdqu (%rdi),%xmm0 pxor %xmm6,%xmm0 call _vpaes_encrypt_core movdqa %xmm0,%xmm6 movdqu %xmm0,(%rsi,%rdi,1) leaq 16(%rdi),%rdi subq $16,%rcx jnc .Lcbc_enc_loop jmp .Lcbc_done .align 16 .Lcbc_dec_loop: movdqu (%rdi),%xmm0 movdqa %xmm0,%xmm7 call _vpaes_decrypt_core pxor %xmm6,%xmm0 movdqa %xmm7,%xmm6 movdqu %xmm0,(%rsi,%rdi,1) leaq 16(%rdi),%rdi subq $16,%rcx jnc .Lcbc_dec_loop .Lcbc_done: movdqu %xmm6,(%r8) .Lcbc_abort: .byte 0xf3,0xc3 .cfi_endproc .size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt .globl vpaes_ctr32_encrypt_blocks .hidden vpaes_ctr32_encrypt_blocks .type vpaes_ctr32_encrypt_blocks,@function .align 16 vpaes_ctr32_encrypt_blocks: .cfi_startproc _CET_ENDBR xchgq %rcx,%rdx testq %rcx,%rcx jz .Lctr32_abort movdqu (%r8),%xmm0 movdqa .Lctr_add_one(%rip),%xmm8 subq %rdi,%rsi call _vpaes_preheat movdqa %xmm0,%xmm6 pshufb .Lrev_ctr(%rip),%xmm6 testq $1,%rcx jz .Lctr32_prep_loop movdqu (%rdi),%xmm7 call _vpaes_encrypt_core pxor %xmm7,%xmm0 paddd %xmm8,%xmm6 movdqu 
%xmm0,(%rsi,%rdi,1) subq $1,%rcx leaq 16(%rdi),%rdi jz .Lctr32_done .Lctr32_prep_loop: movdqa %xmm6,%xmm14 movdqa %xmm6,%xmm15 paddd %xmm8,%xmm15 .Lctr32_loop: movdqa .Lrev_ctr(%rip),%xmm1 movdqa %xmm14,%xmm0 movdqa %xmm15,%xmm6 .byte 102,15,56,0,193 .byte 102,15,56,0,241 call _vpaes_encrypt_core_2x movdqu (%rdi),%xmm1 movdqu 16(%rdi),%xmm2 movdqa .Lctr_add_two(%rip),%xmm3 pxor %xmm1,%xmm0 pxor %xmm2,%xmm6 paddd %xmm3,%xmm14 paddd %xmm3,%xmm15 movdqu %xmm0,(%rsi,%rdi,1) movdqu %xmm6,16(%rsi,%rdi,1) subq $2,%rcx leaq 32(%rdi),%rdi jnz .Lctr32_loop .Lctr32_done: .Lctr32_abort: .byte 0xf3,0xc3 .cfi_endproc .size vpaes_ctr32_encrypt_blocks,.-vpaes_ctr32_encrypt_blocks .type _vpaes_preheat,@function .align 16 _vpaes_preheat: .cfi_startproc leaq .Lk_s0F(%rip),%r10 movdqa -32(%r10),%xmm10 movdqa -16(%r10),%xmm11 movdqa 0(%r10),%xmm9 movdqa 48(%r10),%xmm13 movdqa 64(%r10),%xmm12 movdqa 80(%r10),%xmm15 movdqa 96(%r10),%xmm14 .byte 0xf3,0xc3 .cfi_endproc .size _vpaes_preheat,.-_vpaes_preheat .type _vpaes_consts,@object .section .rodata .align 64 _vpaes_consts: .Lk_inv: .quad 0x0E05060F0D080180, 0x040703090A0B0C02 .quad 0x01040A060F0B0780, 0x030D0E0C02050809 .Lk_s0F: .quad 0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F0F0F0F0F .Lk_ipt: .quad 0xC2B2E8985A2A7000, 0xCABAE09052227808 .quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81 .Lk_sb1: .quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544 .quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF .Lk_sb2: .quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD .quad 0x69EB88400AE12900, 0xC2A163C8AB82234A .Lk_sbo: .quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878 .quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA .Lk_mc_forward: .quad 0x0407060500030201, 0x0C0F0E0D080B0A09 .quad 0x080B0A0904070605, 0x000302010C0F0E0D .quad 0x0C0F0E0D080B0A09, 0x0407060500030201 .quad 0x000302010C0F0E0D, 0x080B0A0904070605 .Lk_mc_backward: .quad 0x0605040702010003, 0x0E0D0C0F0A09080B .quad 0x020100030E0D0C0F, 0x0A09080B06050407 .quad 0x0E0D0C0F0A09080B, 0x0605040702010003 .quad 0x0A09080B06050407, 0x020100030E0D0C0F .Lk_sr: .quad 0x0706050403020100, 0x0F0E0D0C0B0A0908 .quad 0x030E09040F0A0500, 0x0B06010C07020D08 .quad 0x0F060D040B020900, 0x070E050C030A0108 .quad 0x0B0E0104070A0D00, 0x0306090C0F020508 .Lk_rcon: .quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81 .Lk_s63: .quad 0x5B5B5B5B5B5B5B5B, 0x5B5B5B5B5B5B5B5B .Lk_opt: .quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808 .quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0 .Lk_deskew: .quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A .quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77 .Lk_dksd: .quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9 .quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E .Lk_dksb: .quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99 .quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8 .Lk_dkse: .quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086 .quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487 .Lk_dks9: .quad 0xB6116FC87ED9A700, 0x4AED933482255BFC .quad 0x4576516227143300, 0x8BB89FACE9DAFDCE .Lk_dipt: .quad 0x0F505B040B545F00, 0x154A411E114E451A .quad 0x86E383E660056500, 0x12771772F491F194 .Lk_dsb9: .quad 0x851C03539A86D600, 0xCAD51F504F994CC9 .quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565 .Lk_dsbd: .quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439 .quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3 .Lk_dsbb: .quad 0xD022649296B44200, 0x602646F6B0F2D404 .quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B .Lk_dsbe: .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 .Lk_dsbo: .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C .Lrev_ctr: .quad 0x0706050403020100, 
0x0c0d0e0f0b0a0908 .Lctr_add_one: .quad 0x0000000000000000, 0x0000000100000000 .Lctr_add_two: .quad 0x0000000000000000, 0x0000000200000000 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 .align 64 .size _vpaes_consts,.-_vpaes_consts .text #endif
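The file above is Mike Hamburg's SSSE3 vector-permutation AES; it exports vpaes_set_encrypt_key, vpaes_set_decrypt_key, vpaes_encrypt, vpaes_decrypt, vpaes_cbc_encrypt and vpaes_ctr32_encrypt_blocks, and the library normally reaches them through its AES dispatch only after an SSSE3 check. The C sketch below shows how the entry points are typically driven; the prototypes are reconstructed from the register usage here and from memory of the library's private AES headers, so treat them as assumptions rather than the authoritative declarations.

/* Hedged caller sketch; the prototypes are assumptions, not the library header. */
#include <stdint.h>
#include <stddef.h>
#include <string.h>
#include <openssl/aes.h>  /* AES_KEY */

int vpaes_set_encrypt_key(const uint8_t *user_key, int bits, AES_KEY *key);
void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

/* Encrypt one block, then a multiple-of-16-byte buffer in CBC mode. */
static void vpaes_sketch(const uint8_t key_bytes[16], const uint8_t iv[16],
                         const uint8_t *pt, size_t len, uint8_t *ct) {
  AES_KEY key;
  uint8_t iv_copy[16];

  vpaes_set_encrypt_key(key_bytes, 128, &key);  /* returns 0, per the xorl %eax,%eax above */
  vpaes_encrypt(pt, ct, &key);                  /* single 16-byte block */

  memcpy(iv_copy, iv, sizeof(iv_copy));         /* the IV buffer is updated in place */
  vpaes_cbc_encrypt(pt, ct, len, &key, iv_copy, /*enc=*/1);  /* len assumed a multiple of 16 */
}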
marvin-hansen/iggy-streaming-system
4,858
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .type beeu_mod_inverse_vartime,@function .hidden beeu_mod_inverse_vartime .globl beeu_mod_inverse_vartime .hidden beeu_mod_inverse_vartime .align 32 beeu_mod_inverse_vartime: .cfi_startproc _CET_ENDBR pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset rbp,-16 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset r12,-24 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset r13,-32 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset r14,-40 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset r15,-48 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset rbx,-56 pushq %rsi .cfi_adjust_cfa_offset 8 .cfi_offset rsi,-64 subq $80,%rsp .cfi_adjust_cfa_offset 80 movq %rdi,0(%rsp) movq $1,%r8 xorq %r9,%r9 xorq %r10,%r10 xorq %r11,%r11 xorq %rdi,%rdi xorq %r12,%r12 xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 xorq %rbp,%rbp vmovdqu 0(%rsi),%xmm0 vmovdqu 16(%rsi),%xmm1 vmovdqu %xmm0,48(%rsp) vmovdqu %xmm1,64(%rsp) vmovdqu 0(%rdx),%xmm0 vmovdqu 16(%rdx),%xmm1 vmovdqu %xmm0,16(%rsp) vmovdqu %xmm1,32(%rsp) .Lbeeu_loop: xorq %rbx,%rbx orq 48(%rsp),%rbx orq 56(%rsp),%rbx orq 64(%rsp),%rbx orq 72(%rsp),%rbx jz .Lbeeu_loop_end movq $1,%rcx .Lbeeu_shift_loop_XB: movq %rcx,%rbx andq 48(%rsp),%rbx jnz .Lbeeu_shift_loop_end_XB movq $1,%rbx andq %r8,%rbx jz .Lshift1_0 addq 0(%rdx),%r8 adcq 8(%rdx),%r9 adcq 16(%rdx),%r10 adcq 24(%rdx),%r11 adcq $0,%rdi .Lshift1_0: shrdq $1,%r9,%r8 shrdq $1,%r10,%r9 shrdq $1,%r11,%r10 shrdq $1,%rdi,%r11 shrq $1,%rdi shlq $1,%rcx cmpq $0x8000000,%rcx jne .Lbeeu_shift_loop_XB .Lbeeu_shift_loop_end_XB: bsfq %rcx,%rcx testq %rcx,%rcx jz .Lbeeu_no_shift_XB movq 8+48(%rsp),%rax movq 16+48(%rsp),%rbx movq 24+48(%rsp),%rsi shrdq %cl,%rax,0+48(%rsp) shrdq %cl,%rbx,8+48(%rsp) shrdq %cl,%rsi,16+48(%rsp) shrq %cl,%rsi movq %rsi,24+48(%rsp) .Lbeeu_no_shift_XB: movq $1,%rcx .Lbeeu_shift_loop_YA: movq %rcx,%rbx andq 16(%rsp),%rbx jnz .Lbeeu_shift_loop_end_YA movq $1,%rbx andq %r12,%rbx jz .Lshift1_1 addq 0(%rdx),%r12 adcq 8(%rdx),%r13 adcq 16(%rdx),%r14 adcq 24(%rdx),%r15 adcq $0,%rbp .Lshift1_1: shrdq $1,%r13,%r12 shrdq $1,%r14,%r13 shrdq $1,%r15,%r14 shrdq $1,%rbp,%r15 shrq $1,%rbp shlq $1,%rcx cmpq $0x8000000,%rcx jne .Lbeeu_shift_loop_YA .Lbeeu_shift_loop_end_YA: bsfq %rcx,%rcx testq %rcx,%rcx jz .Lbeeu_no_shift_YA movq 8+16(%rsp),%rax movq 16+16(%rsp),%rbx movq 24+16(%rsp),%rsi shrdq %cl,%rax,0+16(%rsp) shrdq %cl,%rbx,8+16(%rsp) shrdq %cl,%rsi,16+16(%rsp) shrq %cl,%rsi movq %rsi,24+16(%rsp) .Lbeeu_no_shift_YA: movq 48(%rsp),%rax movq 56(%rsp),%rbx movq 64(%rsp),%rsi movq 72(%rsp),%rcx subq 16(%rsp),%rax sbbq 24(%rsp),%rbx sbbq 32(%rsp),%rsi sbbq 40(%rsp),%rcx jnc .Lbeeu_B_bigger_than_A movq 16(%rsp),%rax movq 24(%rsp),%rbx movq 32(%rsp),%rsi movq 40(%rsp),%rcx subq 48(%rsp),%rax sbbq 56(%rsp),%rbx sbbq 64(%rsp),%rsi sbbq 72(%rsp),%rcx movq %rax,16(%rsp) movq %rbx,24(%rsp) movq %rsi,32(%rsp) movq %rcx,40(%rsp) addq %r8,%r12 adcq %r9,%r13 adcq %r10,%r14 adcq %r11,%r15 adcq %rdi,%rbp jmp .Lbeeu_loop .Lbeeu_B_bigger_than_A: movq %rax,48(%rsp) movq %rbx,56(%rsp) movq %rsi,64(%rsp) movq %rcx,72(%rsp) addq %r12,%r8 adcq %r13,%r9 adcq %r14,%r10 adcq %r15,%r11 adcq %rbp,%rdi jmp .Lbeeu_loop .Lbeeu_loop_end: movq 16(%rsp),%rbx subq $1,%rbx orq 24(%rsp),%rbx orq 32(%rsp),%rbx orq 40(%rsp),%rbx jnz .Lbeeu_err movq 0(%rdx),%r8 movq 8(%rdx),%r9 movq 16(%rdx),%r10 movq 24(%rdx),%r11 xorq %rdi,%rdi 
.Lbeeu_reduction_loop: movq %r12,16(%rsp) movq %r13,24(%rsp) movq %r14,32(%rsp) movq %r15,40(%rsp) movq %rbp,48(%rsp) subq %r8,%r12 sbbq %r9,%r13 sbbq %r10,%r14 sbbq %r11,%r15 sbbq $0,%rbp cmovcq 16(%rsp),%r12 cmovcq 24(%rsp),%r13 cmovcq 32(%rsp),%r14 cmovcq 40(%rsp),%r15 jnc .Lbeeu_reduction_loop subq %r12,%r8 sbbq %r13,%r9 sbbq %r14,%r10 sbbq %r15,%r11 .Lbeeu_save: movq 0(%rsp),%rdi movq %r8,0(%rdi) movq %r9,8(%rdi) movq %r10,16(%rdi) movq %r11,24(%rdi) movq $1,%rax jmp .Lbeeu_finish .Lbeeu_err: xorq %rax,%rax .Lbeeu_finish: addq $80,%rsp .cfi_adjust_cfa_offset -80 popq %rsi .cfi_adjust_cfa_offset -8 .cfi_restore rsi popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore rbx popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore r12 popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore rbp .byte 0xf3,0xc3 .cfi_endproc .size beeu_mod_inverse_vartime, .-beeu_mod_inverse_vartime #endif
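beeu_mod_inverse_vartime above is a binary extended Euclidean (BEEU) modular inverse for 256-bit odd moduli; it returns 1 in %rax on success and 0 on error (the .Lbeeu_save and .Lbeeu_err paths). The C below restates the algorithm on ordinary 64-bit integers purely to illustrate the invariants the assembly maintains; the four-limb prototype is an assumption reconstructed from the library's private header and the stack layout above.

#include <stdint.h>

typedef uint64_t BN_ULONG;
#define P256_LIMBS 4  /* assumed: 256 bits as 4 x 64-bit little-endian limbs */

/* Assumed prototype of the routine above: out = a^-1 mod n, 1 = ok, 0 = error. */
int beeu_mod_inverse_vartime(BN_ULONG out[P256_LIMBS],
                             const BN_ULONG a[P256_LIMBS],
                             const BN_ULONG n[P256_LIMBS]);

/* Reference binary extended Euclid for a small odd modulus n < 2^63.
 * Invariants: X*a == B (mod n) and Y*a == A (mod n). When B reaches 0,
 * A holds gcd(a, n); if that is 1, Y is the inverse. Variable-time. */
static uint64_t beeu_inverse_u64(uint64_t a, uint64_t n) {
  uint64_t A = n, B = a % n, X = 1, Y = 0;
  while (B != 0) {
    while ((B & 1) == 0) {                    /* strip factors of two from B ... */
      B >>= 1;
      X = (X & 1) ? (X + n) >> 1 : X >> 1;    /* ... halving X modulo the odd n */
    }
    while ((A & 1) == 0) {                    /* and likewise from A */
      A >>= 1;
      Y = (Y & 1) ? (Y + n) >> 1 : Y >> 1;
    }
    if (B >= A) {
      B -= A;
      X = (X >= Y) ? X - Y : X + n - Y;       /* X = X - Y mod n */
    } else {
      A -= B;
      Y = (Y >= X) ? Y - X : Y + n - X;       /* Y = Y - X mod n */
    }
  }
  return (A == 1) ? Y : 0;                    /* 0 signals "no inverse", as above */
}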
marvin-hansen/iggy-streaming-system
19,720
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .type _aesni_ctr32_ghash_6x,@function .align 32 _aesni_ctr32_ghash_6x: .cfi_startproc vmovdqu 32(%r11),%xmm2 subq $6,%rdx vpxor %xmm4,%xmm4,%xmm4 vmovdqu 0-128(%rcx),%xmm15 vpaddb %xmm2,%xmm1,%xmm10 vpaddb %xmm2,%xmm10,%xmm11 vpaddb %xmm2,%xmm11,%xmm12 vpaddb %xmm2,%xmm12,%xmm13 vpaddb %xmm2,%xmm13,%xmm14 vpxor %xmm15,%xmm1,%xmm9 vmovdqu %xmm4,16+8(%rsp) jmp .Loop6x .align 32 .Loop6x: addl $100663296,%ebx jc .Lhandle_ctr32 vmovdqu 0-32(%r9),%xmm3 vpaddb %xmm2,%xmm14,%xmm1 vpxor %xmm15,%xmm10,%xmm10 vpxor %xmm15,%xmm11,%xmm11 .Lresume_ctr32: vmovdqu %xmm1,(%r8) vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5 vpxor %xmm15,%xmm12,%xmm12 vmovups 16-128(%rcx),%xmm2 vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6 xorq %r12,%r12 cmpq %r14,%r15 vaesenc %xmm2,%xmm9,%xmm9 vmovdqu 48+8(%rsp),%xmm0 vpxor %xmm15,%xmm13,%xmm13 vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1 vaesenc %xmm2,%xmm10,%xmm10 vpxor %xmm15,%xmm14,%xmm14 setnc %r12b vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vmovdqu 16-32(%r9),%xmm3 negq %r12 vaesenc %xmm2,%xmm12,%xmm12 vpxor %xmm5,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5 vpxor %xmm4,%xmm8,%xmm8 vaesenc %xmm2,%xmm13,%xmm13 vpxor %xmm5,%xmm1,%xmm4 andq $0x60,%r12 vmovups 32-128(%rcx),%xmm15 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1 vaesenc %xmm2,%xmm14,%xmm14 vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2 leaq (%r14,%r12,1),%r14 vaesenc %xmm15,%xmm9,%xmm9 vpxor 16+8(%rsp),%xmm8,%xmm8 vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3 vmovdqu 64+8(%rsp),%xmm0 vaesenc %xmm15,%xmm10,%xmm10 movbeq 88(%r14),%r13 vaesenc %xmm15,%xmm11,%xmm11 movbeq 80(%r14),%r12 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,32+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,40+8(%rsp) vmovdqu 48-32(%r9),%xmm5 vaesenc %xmm15,%xmm14,%xmm14 vmovups 48-128(%rcx),%xmm15 vpxor %xmm1,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm2,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2 vaesenc %xmm15,%xmm10,%xmm10 vpxor %xmm3,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3 vaesenc %xmm15,%xmm11,%xmm11 vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5 vmovdqu 80+8(%rsp),%xmm0 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vpxor %xmm1,%xmm4,%xmm4 vmovdqu 64-32(%r9),%xmm1 vaesenc %xmm15,%xmm14,%xmm14 vmovups 64-128(%rcx),%xmm15 vpxor %xmm2,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm3,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3 vaesenc %xmm15,%xmm10,%xmm10 movbeq 72(%r14),%r13 vpxor %xmm5,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5 vaesenc %xmm15,%xmm11,%xmm11 movbeq 64(%r14),%r12 vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1 vmovdqu 96+8(%rsp),%xmm0 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,48+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,56+8(%rsp) vpxor %xmm2,%xmm4,%xmm4 vmovdqu 96-32(%r9),%xmm2 vaesenc %xmm15,%xmm14,%xmm14 vmovups 80-128(%rcx),%xmm15 vpxor %xmm3,%xmm6,%xmm6 vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm5,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5 vaesenc %xmm15,%xmm10,%xmm10 movbeq 56(%r14),%r13 vpxor %xmm1,%xmm7,%xmm7 vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1 vpxor 112+8(%rsp),%xmm8,%xmm8 vaesenc %xmm15,%xmm11,%xmm11 movbeq 48(%r14),%r12 vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,64+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,72+8(%rsp) vpxor %xmm3,%xmm4,%xmm4 vmovdqu 112-32(%r9),%xmm3 vaesenc 
%xmm15,%xmm14,%xmm14 vmovups 96-128(%rcx),%xmm15 vpxor %xmm5,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm1,%xmm6,%xmm6 vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1 vaesenc %xmm15,%xmm10,%xmm10 movbeq 40(%r14),%r13 vpxor %xmm2,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2 vaesenc %xmm15,%xmm11,%xmm11 movbeq 32(%r14),%r12 vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8 vaesenc %xmm15,%xmm12,%xmm12 movq %r13,80+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 movq %r12,88+8(%rsp) vpxor %xmm5,%xmm6,%xmm6 vaesenc %xmm15,%xmm14,%xmm14 vpxor %xmm1,%xmm6,%xmm6 vmovups 112-128(%rcx),%xmm15 vpslldq $8,%xmm6,%xmm5 vpxor %xmm2,%xmm4,%xmm4 vmovdqu 16(%r11),%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor %xmm8,%xmm7,%xmm7 vaesenc %xmm15,%xmm10,%xmm10 vpxor %xmm5,%xmm4,%xmm4 movbeq 24(%r14),%r13 vaesenc %xmm15,%xmm11,%xmm11 movbeq 16(%r14),%r12 vpalignr $8,%xmm4,%xmm4,%xmm0 vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 movq %r13,96+8(%rsp) vaesenc %xmm15,%xmm12,%xmm12 movq %r12,104+8(%rsp) vaesenc %xmm15,%xmm13,%xmm13 vmovups 128-128(%rcx),%xmm1 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vmovups 144-128(%rcx),%xmm15 vaesenc %xmm1,%xmm10,%xmm10 vpsrldq $8,%xmm6,%xmm6 vaesenc %xmm1,%xmm11,%xmm11 vpxor %xmm6,%xmm7,%xmm7 vaesenc %xmm1,%xmm12,%xmm12 vpxor %xmm0,%xmm4,%xmm4 movbeq 8(%r14),%r13 vaesenc %xmm1,%xmm13,%xmm13 movbeq 0(%r14),%r12 vaesenc %xmm1,%xmm14,%xmm14 vmovups 160-128(%rcx),%xmm1 cmpl $11,%r10d jb .Lenc_tail vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovups 176-128(%rcx),%xmm15 vaesenc %xmm1,%xmm14,%xmm14 vmovups 192-128(%rcx),%xmm1 je .Lenc_tail vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovups 208-128(%rcx),%xmm15 vaesenc %xmm1,%xmm14,%xmm14 vmovups 224-128(%rcx),%xmm1 jmp .Lenc_tail .align 32 .Lhandle_ctr32: vmovdqu (%r11),%xmm0 vpshufb %xmm0,%xmm1,%xmm6 vmovdqu 48(%r11),%xmm5 vpaddd 64(%r11),%xmm6,%xmm10 vpaddd %xmm5,%xmm6,%xmm11 vmovdqu 0-32(%r9),%xmm3 vpaddd %xmm5,%xmm10,%xmm12 vpshufb %xmm0,%xmm10,%xmm10 vpaddd %xmm5,%xmm11,%xmm13 vpshufb %xmm0,%xmm11,%xmm11 vpxor %xmm15,%xmm10,%xmm10 vpaddd %xmm5,%xmm12,%xmm14 vpshufb %xmm0,%xmm12,%xmm12 vpxor %xmm15,%xmm11,%xmm11 vpaddd %xmm5,%xmm13,%xmm1 vpshufb %xmm0,%xmm13,%xmm13 vpshufb %xmm0,%xmm14,%xmm14 vpshufb %xmm0,%xmm1,%xmm1 jmp .Lresume_ctr32 .align 32 .Lenc_tail: vaesenc %xmm15,%xmm9,%xmm9 vmovdqu %xmm7,16+8(%rsp) vpalignr $8,%xmm4,%xmm4,%xmm8 vaesenc %xmm15,%xmm10,%xmm10 vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 vpxor 0(%rdi),%xmm1,%xmm2 vaesenc %xmm15,%xmm11,%xmm11 vpxor 16(%rdi),%xmm1,%xmm0 vaesenc %xmm15,%xmm12,%xmm12 vpxor 32(%rdi),%xmm1,%xmm5 vaesenc %xmm15,%xmm13,%xmm13 vpxor 48(%rdi),%xmm1,%xmm6 vaesenc %xmm15,%xmm14,%xmm14 vpxor 64(%rdi),%xmm1,%xmm7 vpxor 80(%rdi),%xmm1,%xmm3 vmovdqu (%r8),%xmm1 vaesenclast %xmm2,%xmm9,%xmm9 vmovdqu 32(%r11),%xmm2 vaesenclast %xmm0,%xmm10,%xmm10 vpaddb %xmm2,%xmm1,%xmm0 movq %r13,112+8(%rsp) leaq 96(%rdi),%rdi prefetcht0 512(%rdi) prefetcht0 576(%rdi) vaesenclast %xmm5,%xmm11,%xmm11 vpaddb %xmm2,%xmm0,%xmm5 movq %r12,120+8(%rsp) leaq 96(%rsi),%rsi vmovdqu 0-128(%rcx),%xmm15 
vaesenclast %xmm6,%xmm12,%xmm12 vpaddb %xmm2,%xmm5,%xmm6 vaesenclast %xmm7,%xmm13,%xmm13 vpaddb %xmm2,%xmm6,%xmm7 vaesenclast %xmm3,%xmm14,%xmm14 vpaddb %xmm2,%xmm7,%xmm3 addq $0x60,%rax subq $0x6,%rdx jc .L6x_done vmovups %xmm9,-96(%rsi) vpxor %xmm15,%xmm1,%xmm9 vmovups %xmm10,-80(%rsi) vmovdqa %xmm0,%xmm10 vmovups %xmm11,-64(%rsi) vmovdqa %xmm5,%xmm11 vmovups %xmm12,-48(%rsi) vmovdqa %xmm6,%xmm12 vmovups %xmm13,-32(%rsi) vmovdqa %xmm7,%xmm13 vmovups %xmm14,-16(%rsi) vmovdqa %xmm3,%xmm14 vmovdqu 32+8(%rsp),%xmm7 jmp .Loop6x .L6x_done: vpxor 16+8(%rsp),%xmm8,%xmm8 vpxor %xmm4,%xmm8,%xmm8 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x .globl aesni_gcm_decrypt .hidden aesni_gcm_decrypt .type aesni_gcm_decrypt,@function .align 32 aesni_gcm_decrypt: .cfi_startproc _CET_ENDBR xorq %rax,%rax cmpq $0x60,%rdx jb .Lgcm_dec_abort pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 movq %rsp,%rbp .cfi_def_cfa_register %rbp pushq %rbx .cfi_offset %rbx,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 vzeroupper movq 16(%rbp),%r12 vmovdqu (%r8),%xmm1 addq $-128,%rsp movl 12(%r8),%ebx leaq .Lbswap_mask(%rip),%r11 leaq -128(%rcx),%r14 movq $0xf80,%r15 vmovdqu (%r12),%xmm8 andq $-128,%rsp vmovdqu (%r11),%xmm0 leaq 128(%rcx),%rcx leaq 32(%r9),%r9 movl 240-128(%rcx),%r10d vpshufb %xmm0,%xmm8,%xmm8 andq %r15,%r14 andq %rsp,%r15 subq %r14,%r15 jc .Ldec_no_key_aliasing cmpq $768,%r15 jnc .Ldec_no_key_aliasing subq %r15,%rsp .Ldec_no_key_aliasing: vmovdqu 80(%rdi),%xmm7 movq %rdi,%r14 vmovdqu 64(%rdi),%xmm4 leaq -192(%rdi,%rdx,1),%r15 vmovdqu 48(%rdi),%xmm5 shrq $4,%rdx xorq %rax,%rax vmovdqu 32(%rdi),%xmm6 vpshufb %xmm0,%xmm7,%xmm7 vmovdqu 16(%rdi),%xmm2 vpshufb %xmm0,%xmm4,%xmm4 vmovdqu (%rdi),%xmm3 vpshufb %xmm0,%xmm5,%xmm5 vmovdqu %xmm4,48(%rsp) vpshufb %xmm0,%xmm6,%xmm6 vmovdqu %xmm5,64(%rsp) vpshufb %xmm0,%xmm2,%xmm2 vmovdqu %xmm6,80(%rsp) vpshufb %xmm0,%xmm3,%xmm3 vmovdqu %xmm2,96(%rsp) vmovdqu %xmm3,112(%rsp) call _aesni_ctr32_ghash_6x movq 16(%rbp),%r12 vmovups %xmm9,-96(%rsi) vmovups %xmm10,-80(%rsi) vmovups %xmm11,-64(%rsi) vmovups %xmm12,-48(%rsi) vmovups %xmm13,-32(%rsi) vmovups %xmm14,-16(%rsi) vpshufb (%r11),%xmm8,%xmm8 vmovdqu %xmm8,(%r12) vzeroupper leaq -40(%rbp),%rsp .cfi_def_cfa %rsp, 0x38 popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp .Lgcm_dec_abort: .byte 0xf3,0xc3 .cfi_endproc .size aesni_gcm_decrypt,.-aesni_gcm_decrypt .type _aesni_ctr32_6x,@function .align 32 _aesni_ctr32_6x: .cfi_startproc vmovdqu 0-128(%rcx),%xmm4 vmovdqu 32(%r11),%xmm2 leaq -1(%r10),%r13 vmovups 16-128(%rcx),%xmm15 leaq 32-128(%rcx),%r12 vpxor %xmm4,%xmm1,%xmm9 addl $100663296,%ebx jc .Lhandle_ctr32_2 vpaddb %xmm2,%xmm1,%xmm10 vpaddb %xmm2,%xmm10,%xmm11 vpxor %xmm4,%xmm10,%xmm10 vpaddb %xmm2,%xmm11,%xmm12 vpxor %xmm4,%xmm11,%xmm11 vpaddb %xmm2,%xmm12,%xmm13 vpxor %xmm4,%xmm12,%xmm12 vpaddb %xmm2,%xmm13,%xmm14 vpxor %xmm4,%xmm13,%xmm13 vpaddb %xmm2,%xmm14,%xmm1 vpxor %xmm4,%xmm14,%xmm14 jmp .Loop_ctr32 .align 16 .Loop_ctr32: vaesenc %xmm15,%xmm9,%xmm9 vaesenc %xmm15,%xmm10,%xmm10 vaesenc %xmm15,%xmm11,%xmm11 vaesenc %xmm15,%xmm12,%xmm12 vaesenc %xmm15,%xmm13,%xmm13 vaesenc %xmm15,%xmm14,%xmm14 vmovups (%r12),%xmm15 leaq 
16(%r12),%r12 decl %r13d jnz .Loop_ctr32 vmovdqu (%r12),%xmm3 vaesenc %xmm15,%xmm9,%xmm9 vpxor 0(%rdi),%xmm3,%xmm4 vaesenc %xmm15,%xmm10,%xmm10 vpxor 16(%rdi),%xmm3,%xmm5 vaesenc %xmm15,%xmm11,%xmm11 vpxor 32(%rdi),%xmm3,%xmm6 vaesenc %xmm15,%xmm12,%xmm12 vpxor 48(%rdi),%xmm3,%xmm8 vaesenc %xmm15,%xmm13,%xmm13 vpxor 64(%rdi),%xmm3,%xmm2 vaesenc %xmm15,%xmm14,%xmm14 vpxor 80(%rdi),%xmm3,%xmm3 leaq 96(%rdi),%rdi vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm5,%xmm10,%xmm10 vaesenclast %xmm6,%xmm11,%xmm11 vaesenclast %xmm8,%xmm12,%xmm12 vaesenclast %xmm2,%xmm13,%xmm13 vaesenclast %xmm3,%xmm14,%xmm14 vmovups %xmm9,0(%rsi) vmovups %xmm10,16(%rsi) vmovups %xmm11,32(%rsi) vmovups %xmm12,48(%rsi) vmovups %xmm13,64(%rsi) vmovups %xmm14,80(%rsi) leaq 96(%rsi),%rsi .byte 0xf3,0xc3 .align 32 .Lhandle_ctr32_2: vpshufb %xmm0,%xmm1,%xmm6 vmovdqu 48(%r11),%xmm5 vpaddd 64(%r11),%xmm6,%xmm10 vpaddd %xmm5,%xmm6,%xmm11 vpaddd %xmm5,%xmm10,%xmm12 vpshufb %xmm0,%xmm10,%xmm10 vpaddd %xmm5,%xmm11,%xmm13 vpshufb %xmm0,%xmm11,%xmm11 vpxor %xmm4,%xmm10,%xmm10 vpaddd %xmm5,%xmm12,%xmm14 vpshufb %xmm0,%xmm12,%xmm12 vpxor %xmm4,%xmm11,%xmm11 vpaddd %xmm5,%xmm13,%xmm1 vpshufb %xmm0,%xmm13,%xmm13 vpxor %xmm4,%xmm12,%xmm12 vpshufb %xmm0,%xmm14,%xmm14 vpxor %xmm4,%xmm13,%xmm13 vpshufb %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm14,%xmm14 jmp .Loop_ctr32 .cfi_endproc .size _aesni_ctr32_6x,.-_aesni_ctr32_6x .globl aesni_gcm_encrypt .hidden aesni_gcm_encrypt .type aesni_gcm_encrypt,@function .align 32 aesni_gcm_encrypt: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+2(%rip) #endif xorq %rax,%rax cmpq $288,%rdx jb .Lgcm_enc_abort pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 movq %rsp,%rbp .cfi_def_cfa_register %rbp pushq %rbx .cfi_offset %rbx,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 vzeroupper vmovdqu (%r8),%xmm1 addq $-128,%rsp movl 12(%r8),%ebx leaq .Lbswap_mask(%rip),%r11 leaq -128(%rcx),%r14 movq $0xf80,%r15 leaq 128(%rcx),%rcx vmovdqu (%r11),%xmm0 andq $-128,%rsp movl 240-128(%rcx),%r10d andq %r15,%r14 andq %rsp,%r15 subq %r14,%r15 jc .Lenc_no_key_aliasing cmpq $768,%r15 jnc .Lenc_no_key_aliasing subq %r15,%rsp .Lenc_no_key_aliasing: movq %rsi,%r14 leaq -192(%rsi,%rdx,1),%r15 shrq $4,%rdx call _aesni_ctr32_6x vpshufb %xmm0,%xmm9,%xmm8 vpshufb %xmm0,%xmm10,%xmm2 vmovdqu %xmm8,112(%rsp) vpshufb %xmm0,%xmm11,%xmm4 vmovdqu %xmm2,96(%rsp) vpshufb %xmm0,%xmm12,%xmm5 vmovdqu %xmm4,80(%rsp) vpshufb %xmm0,%xmm13,%xmm6 vmovdqu %xmm5,64(%rsp) vpshufb %xmm0,%xmm14,%xmm7 vmovdqu %xmm6,48(%rsp) call _aesni_ctr32_6x movq 16(%rbp),%r12 leaq 32(%r9),%r9 vmovdqu (%r12),%xmm8 subq $12,%rdx movq $192,%rax vpshufb %xmm0,%xmm8,%xmm8 call _aesni_ctr32_ghash_6x vmovdqu 32(%rsp),%xmm7 vmovdqu (%r11),%xmm0 vmovdqu 0-32(%r9),%xmm3 vpunpckhqdq %xmm7,%xmm7,%xmm1 vmovdqu 32-32(%r9),%xmm15 vmovups %xmm9,-96(%rsi) vpshufb %xmm0,%xmm9,%xmm9 vpxor %xmm7,%xmm1,%xmm1 vmovups %xmm10,-80(%rsi) vpshufb %xmm0,%xmm10,%xmm10 vmovups %xmm11,-64(%rsi) vpshufb %xmm0,%xmm11,%xmm11 vmovups %xmm12,-48(%rsi) vpshufb %xmm0,%xmm12,%xmm12 vmovups %xmm13,-32(%rsi) vpshufb %xmm0,%xmm13,%xmm13 vmovups %xmm14,-16(%rsi) vpshufb %xmm0,%xmm14,%xmm14 vmovdqu %xmm9,16(%rsp) vmovdqu 48(%rsp),%xmm6 vmovdqu 16-32(%r9),%xmm0 vpunpckhqdq %xmm6,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5 vpxor %xmm6,%xmm2,%xmm2 vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 vmovdqu 
64(%rsp),%xmm9 vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4 vmovdqu 48-32(%r9),%xmm3 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm9,%xmm9,%xmm5 vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6 vpxor %xmm9,%xmm5,%xmm5 vpxor %xmm7,%xmm6,%xmm6 vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 vmovdqu 80-32(%r9),%xmm15 vpxor %xmm1,%xmm2,%xmm2 vmovdqu 80(%rsp),%xmm1 vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7 vmovdqu 64-32(%r9),%xmm0 vpxor %xmm4,%xmm7,%xmm7 vpunpckhqdq %xmm1,%xmm1,%xmm4 vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9 vpxor %xmm1,%xmm4,%xmm4 vpxor %xmm6,%xmm9,%xmm9 vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5 vpxor %xmm2,%xmm5,%xmm5 vmovdqu 96(%rsp),%xmm2 vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6 vmovdqu 96-32(%r9),%xmm3 vpxor %xmm7,%xmm6,%xmm6 vpunpckhqdq %xmm2,%xmm2,%xmm7 vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1 vpxor %xmm2,%xmm7,%xmm7 vpxor %xmm9,%xmm1,%xmm1 vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4 vmovdqu 128-32(%r9),%xmm15 vpxor %xmm5,%xmm4,%xmm4 vpxor 112(%rsp),%xmm8,%xmm8 vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5 vmovdqu 112-32(%r9),%xmm0 vpunpckhqdq %xmm8,%xmm8,%xmm9 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x11,%xmm3,%xmm2,%xmm2 vpxor %xmm8,%xmm9,%xmm9 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7 vpxor %xmm4,%xmm7,%xmm4 vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6 vmovdqu 0-32(%r9),%xmm3 vpunpckhqdq %xmm14,%xmm14,%xmm1 vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8 vpxor %xmm14,%xmm1,%xmm1 vpxor %xmm5,%xmm6,%xmm5 vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9 vmovdqu 32-32(%r9),%xmm15 vpxor %xmm2,%xmm8,%xmm7 vpxor %xmm4,%xmm9,%xmm6 vmovdqu 16-32(%r9),%xmm0 vpxor %xmm5,%xmm7,%xmm9 vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4 vpxor %xmm9,%xmm6,%xmm6 vpunpckhqdq %xmm13,%xmm13,%xmm2 vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14 vpxor %xmm13,%xmm2,%xmm2 vpslldq $8,%xmm6,%xmm9 vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 vpxor %xmm9,%xmm5,%xmm8 vpsrldq $8,%xmm6,%xmm6 vpxor %xmm6,%xmm7,%xmm7 vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5 vmovdqu 48-32(%r9),%xmm3 vpxor %xmm4,%xmm5,%xmm5 vpunpckhqdq %xmm12,%xmm12,%xmm9 vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13 vpxor %xmm12,%xmm9,%xmm9 vpxor %xmm14,%xmm13,%xmm13 vpalignr $8,%xmm8,%xmm8,%xmm14 vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 vmovdqu 80-32(%r9),%xmm15 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4 vmovdqu 64-32(%r9),%xmm0 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm11,%xmm11,%xmm1 vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12 vpxor %xmm11,%xmm1,%xmm1 vpxor %xmm13,%xmm12,%xmm12 vxorps 16(%rsp),%xmm7,%xmm7 vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9 vpxor %xmm2,%xmm9,%xmm9 vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 vxorps %xmm14,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5 vmovdqu 96-32(%r9),%xmm3 vpxor %xmm4,%xmm5,%xmm5 vpunpckhqdq %xmm10,%xmm10,%xmm2 vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11 vpxor %xmm10,%xmm2,%xmm2 vpalignr $8,%xmm8,%xmm8,%xmm14 vpxor %xmm12,%xmm11,%xmm11 vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1 vmovdqu 128-32(%r9),%xmm15 vpxor %xmm9,%xmm1,%xmm1 vxorps %xmm7,%xmm14,%xmm14 vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 vxorps %xmm14,%xmm8,%xmm8 vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4 vmovdqu 112-32(%r9),%xmm0 vpxor %xmm5,%xmm4,%xmm4 vpunpckhqdq %xmm8,%xmm8,%xmm9 vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10 vpxor %xmm8,%xmm9,%xmm9 vpxor %xmm11,%xmm10,%xmm10 vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2 vpxor %xmm1,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5 vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7 vpxor %xmm4,%xmm5,%xmm5 vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6 vpxor %xmm10,%xmm7,%xmm7 vpxor %xmm2,%xmm6,%xmm6 vpxor %xmm5,%xmm7,%xmm4 vpxor %xmm4,%xmm6,%xmm6 vpslldq $8,%xmm6,%xmm1 vmovdqu 16(%r11),%xmm3 vpsrldq $8,%xmm6,%xmm6 vpxor %xmm1,%xmm5,%xmm8 vpxor %xmm6,%xmm7,%xmm7 vpalignr $8,%xmm8,%xmm8,%xmm2 vpclmulqdq 
$0x10,%xmm3,%xmm8,%xmm8 vpxor %xmm2,%xmm8,%xmm8 vpalignr $8,%xmm8,%xmm8,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8 vpxor %xmm7,%xmm2,%xmm2 vpxor %xmm2,%xmm8,%xmm8 movq 16(%rbp),%r12 vpshufb (%r11),%xmm8,%xmm8 vmovdqu %xmm8,(%r12) vzeroupper leaq -40(%rbp),%rsp .cfi_def_cfa %rsp, 0x38 popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp .Lgcm_enc_abort: .byte 0xf3,0xc3 .cfi_endproc .size aesni_gcm_encrypt,.-aesni_gcm_encrypt .section .rodata .align 64 .Lbswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 .Lpoly: .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 .Lone_msb: .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 .Ltwo_lsb: .byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 .Lone_lsb: .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 .byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .text #endif
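aesni_gcm_encrypt and aesni_gcm_decrypt above are the stitched AES-NI/PCLMUL bulk AES-GCM routines: they refuse short inputs (the cmpq $288 and cmpq $0x60 checks), process six blocks per iteration, and return in %rax how many bytes they handled so the caller can finish the tail with a one-block path. The declarations below are assumptions reconstructed from the register and stack usage in this file (in, out, len, key, ivec, Htable, Xi), not the library's private header, and the u128 element type is likewise assumed.

#include <stdint.h>
#include <stddef.h>
#include <openssl/aes.h>  /* AES_KEY */

typedef struct { uint64_t hi, lo; } u128;  /* assumed GHASH table element */

/* Assumed prototypes for the routines above. */
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const AES_KEY *key, uint8_t ivec[16],
                         const u128 Htable[16], uint64_t Xi[2]);
size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const AES_KEY *key, uint8_t ivec[16],
                         const u128 Htable[16], uint64_t Xi[2]);

/* Bulk-encrypt as much as the assembly will take; the caller must run the
 * remaining len - done bytes through a scalar CTR + GHASH fallback. */
static size_t gcm_seal_bulk(const uint8_t *in, uint8_t *out, size_t len,
                            const AES_KEY *key, uint8_t ivec[16],
                            const u128 Htable[16], uint64_t Xi[2]) {
  size_t done = aesni_gcm_encrypt(in, out, len, key, ivec, Htable, Xi);
  return done;  /* 0 when len was below the routine's minimum */
}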
marvin-hansen/iggy-streaming-system
1,170
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/rdrand-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.
#include <openssl/asm_base.h>
#if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__)
.text
.globl CRYPTO_rdrand
.hidden CRYPTO_rdrand
.type CRYPTO_rdrand,@function
.align 16
CRYPTO_rdrand:
.cfi_startproc
_CET_ENDBR
xorq %rax,%rax
.byte 72,15,199,242
testq %rdx,%rdx
jz .Lerr
cmpq $-1,%rdx
je .Lerr
adcq %rax,%rax
movq %rdx,0(%rdi)
.byte 0xf3,0xc3
.Lerr:
xorq %rax,%rax
.byte 0xf3,0xc3
.cfi_endproc
.size CRYPTO_rdrand,.-CRYPTO_rdrand
.globl CRYPTO_rdrand_multiple8_buf
.hidden CRYPTO_rdrand_multiple8_buf
.type CRYPTO_rdrand_multiple8_buf,@function
.align 16
CRYPTO_rdrand_multiple8_buf:
.cfi_startproc
_CET_ENDBR
testq %rsi,%rsi
jz .Lout
movq $8,%rdx
.Lloop:
.byte 72,15,199,241
jnc .Lerr_multiple
testq %rcx,%rcx
jz .Lerr_multiple
cmpq $-1,%rcx
je .Lerr_multiple
movq %rcx,0(%rdi)
addq %rdx,%rdi
subq %rdx,%rsi
jnz .Lloop
.Lout:
movq $1,%rax
.byte 0xf3,0xc3
.Lerr_multiple:
xorq %rax,%rax
.byte 0xf3,0xc3
.cfi_endproc
.size CRYPTO_rdrand_multiple8_buf,.-CRYPTO_rdrand_multiple8_buf
#endif
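CRYPTO_rdrand above issues one RDRAND (the encoded .byte 72,15,199,242), rejects the suspicious all-zero and all-ones results, and returns 1 in %rax on success; CRYPTO_rdrand_multiple8_buf fills a buffer in 8-byte steps and expects a length that is a multiple of 8. The prototypes in the sketch below match the library's private RNG header as far as recalled, so treat them as assumptions.

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Assumed prototypes; both return 1 on success and 0 on failure. */
int CRYPTO_rdrand(uint8_t out[8]);
int CRYPTO_rdrand_multiple8_buf(uint8_t *buf, size_t len);  /* len % 8 == 0 */

/* Fill an arbitrary-length buffer from RDRAND, using the bulk routine for
 * the 8-byte-aligned prefix and a single draw for any tail bytes. */
static int fill_from_rdrand(uint8_t *buf, size_t len) {
  size_t bulk = len & ~(size_t)7;
  if (bulk != 0 && !CRYPTO_rdrand_multiple8_buf(buf, bulk)) {
    return 0;
  }
  if (len > bulk) {
    uint8_t word[8];
    if (!CRYPTO_rdrand(word)) {
      return 0;
    }
    memcpy(buf + bulk, word, len - bulk);
  }
  return 1;
}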
marvin-hansen/iggy-streaming-system
102,042
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/sha1-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl sha1_block_data_order_nohw .hidden sha1_block_data_order_nohw .type sha1_block_data_order_nohw,@function .align 16 sha1_block_data_order_nohw: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 movq %rdi,%r8 subq $72,%rsp movq %rsi,%r9 andq $-64,%rsp movq %rdx,%r10 movq %rax,64(%rsp) .cfi_escape 0x0f,0x06,0x77,0xc0,0x00,0x06,0x23,0x08 .Lprologue: movl 0(%r8),%esi movl 4(%r8),%edi movl 8(%r8),%r11d movl 12(%r8),%r12d movl 16(%r8),%r13d jmp .Lloop .align 16 .Lloop: movl 0(%r9),%edx bswapl %edx movl 4(%r9),%ebp movl %r12d,%eax movl %edx,0(%rsp) movl %esi,%ecx bswapl %ebp xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%rdx,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 8(%r9),%r14d movl %r11d,%eax movl %ebp,4(%rsp) movl %r13d,%ecx bswapl %r14d xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%rbp,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl %eax,%r12d movl 12(%r9),%edx movl %edi,%eax movl %r14d,8(%rsp) movl %r12d,%ecx bswapl %edx xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%r14,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d movl 16(%r9),%ebp movl %esi,%eax movl %edx,12(%rsp) movl %r11d,%ecx bswapl %ebp xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%rdx,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 20(%r9),%r14d movl %r13d,%eax movl %ebp,16(%rsp) movl %edi,%ecx bswapl %r14d xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%rbp,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi movl 24(%r9),%edx movl %r12d,%eax movl %r14d,20(%rsp) movl %esi,%ecx bswapl %edx xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%r14,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 28(%r9),%ebp movl %r11d,%eax movl %edx,24(%rsp) movl %r13d,%ecx bswapl %ebp xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%rdx,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl %eax,%r12d movl 32(%r9),%r14d movl %edi,%eax movl %ebp,28(%rsp) movl %r12d,%ecx bswapl %r14d xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%rbp,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d movl 36(%r9),%edx movl %esi,%eax movl %r14d,32(%rsp) movl %r11d,%ecx bswapl %edx xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%r14,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 40(%r9),%ebp movl %r13d,%eax movl %edx,36(%rsp) movl %edi,%ecx bswapl %ebp xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%rdx,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi movl 44(%r9),%r14d movl %r12d,%eax movl %ebp,40(%rsp) movl %esi,%ecx bswapl %r14d xorl %r11d,%eax roll $5,%ecx andl %edi,%eax leal 1518500249(%rbp,%r13,1),%r13d addl %ecx,%r13d xorl %r12d,%eax roll $30,%edi addl %eax,%r13d movl 48(%r9),%edx movl %r11d,%eax movl %r14d,44(%rsp) movl %r13d,%ecx bswapl %edx xorl %edi,%eax roll $5,%ecx andl %esi,%eax leal 1518500249(%r14,%r12,1),%r12d addl %ecx,%r12d xorl %r11d,%eax roll $30,%esi addl 
%eax,%r12d movl 52(%r9),%ebp movl %edi,%eax movl %edx,48(%rsp) movl %r12d,%ecx bswapl %ebp xorl %esi,%eax roll $5,%ecx andl %r13d,%eax leal 1518500249(%rdx,%r11,1),%r11d addl %ecx,%r11d xorl %edi,%eax roll $30,%r13d addl %eax,%r11d movl 56(%r9),%r14d movl %esi,%eax movl %ebp,52(%rsp) movl %r11d,%ecx bswapl %r14d xorl %r13d,%eax roll $5,%ecx andl %r12d,%eax leal 1518500249(%rbp,%rdi,1),%edi addl %ecx,%edi xorl %esi,%eax roll $30,%r12d addl %eax,%edi movl 60(%r9),%edx movl %r13d,%eax movl %r14d,56(%rsp) movl %edi,%ecx bswapl %edx xorl %r12d,%eax roll $5,%ecx andl %r11d,%eax leal 1518500249(%r14,%rsi,1),%esi addl %ecx,%esi xorl %r13d,%eax roll $30,%r11d addl %eax,%esi xorl 0(%rsp),%ebp movl %r12d,%eax movl %edx,60(%rsp) movl %esi,%ecx xorl 8(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 32(%rsp),%ebp andl %edi,%eax leal 1518500249(%rdx,%r13,1),%r13d roll $30,%edi xorl %r12d,%eax addl %ecx,%r13d roll $1,%ebp addl %eax,%r13d xorl 4(%rsp),%r14d movl %r11d,%eax movl %ebp,0(%rsp) movl %r13d,%ecx xorl 12(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 36(%rsp),%r14d andl %esi,%eax leal 1518500249(%rbp,%r12,1),%r12d roll $30,%esi xorl %r11d,%eax addl %ecx,%r12d roll $1,%r14d addl %eax,%r12d xorl 8(%rsp),%edx movl %edi,%eax movl %r14d,4(%rsp) movl %r12d,%ecx xorl 16(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 40(%rsp),%edx andl %r13d,%eax leal 1518500249(%r14,%r11,1),%r11d roll $30,%r13d xorl %edi,%eax addl %ecx,%r11d roll $1,%edx addl %eax,%r11d xorl 12(%rsp),%ebp movl %esi,%eax movl %edx,8(%rsp) movl %r11d,%ecx xorl 20(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 44(%rsp),%ebp andl %r12d,%eax leal 1518500249(%rdx,%rdi,1),%edi roll $30,%r12d xorl %esi,%eax addl %ecx,%edi roll $1,%ebp addl %eax,%edi xorl 16(%rsp),%r14d movl %r13d,%eax movl %ebp,12(%rsp) movl %edi,%ecx xorl 24(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 48(%rsp),%r14d andl %r11d,%eax leal 1518500249(%rbp,%rsi,1),%esi roll $30,%r11d xorl %r13d,%eax addl %ecx,%esi roll $1,%r14d addl %eax,%esi xorl 20(%rsp),%edx movl %edi,%eax movl %r14d,16(%rsp) movl %esi,%ecx xorl 28(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 52(%rsp),%edx leal 1859775393(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%edx xorl 24(%rsp),%ebp movl %esi,%eax movl %edx,20(%rsp) movl %r13d,%ecx xorl 32(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 56(%rsp),%ebp leal 1859775393(%rdx,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 28(%rsp),%r14d movl %r13d,%eax movl %ebp,24(%rsp) movl %r12d,%ecx xorl 36(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 60(%rsp),%r14d leal 1859775393(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 32(%rsp),%edx movl %r12d,%eax movl %r14d,28(%rsp) movl %r11d,%ecx xorl 40(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 0(%rsp),%edx leal 1859775393(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 36(%rsp),%ebp movl %r11d,%eax movl %edx,32(%rsp) movl %edi,%ecx xorl 44(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 4(%rsp),%ebp leal 1859775393(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 40(%rsp),%r14d movl %edi,%eax movl %ebp,36(%rsp) movl %esi,%ecx xorl 48(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 8(%rsp),%r14d leal 1859775393(%rbp,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%r14d xorl 44(%rsp),%edx movl %esi,%eax movl %r14d,40(%rsp) movl %r13d,%ecx xorl 52(%rsp),%edx xorl 
%r11d,%eax roll $5,%ecx xorl 12(%rsp),%edx leal 1859775393(%r14,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%edx xorl 48(%rsp),%ebp movl %r13d,%eax movl %edx,44(%rsp) movl %r12d,%ecx xorl 56(%rsp),%ebp xorl %edi,%eax roll $5,%ecx xorl 16(%rsp),%ebp leal 1859775393(%rdx,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%ebp xorl 52(%rsp),%r14d movl %r12d,%eax movl %ebp,48(%rsp) movl %r11d,%ecx xorl 60(%rsp),%r14d xorl %esi,%eax roll $5,%ecx xorl 20(%rsp),%r14d leal 1859775393(%rbp,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%r14d xorl 56(%rsp),%edx movl %r11d,%eax movl %r14d,52(%rsp) movl %edi,%ecx xorl 0(%rsp),%edx xorl %r13d,%eax roll $5,%ecx xorl 24(%rsp),%edx leal 1859775393(%r14,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%edx xorl 60(%rsp),%ebp movl %edi,%eax movl %edx,56(%rsp) movl %esi,%ecx xorl 4(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 28(%rsp),%ebp leal 1859775393(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 0(%rsp),%r14d movl %esi,%eax movl %ebp,60(%rsp) movl %r13d,%ecx xorl 8(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 32(%rsp),%r14d leal 1859775393(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 4(%rsp),%edx movl %r13d,%eax movl %r14d,0(%rsp) movl %r12d,%ecx xorl 12(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 36(%rsp),%edx leal 1859775393(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 8(%rsp),%ebp movl %r12d,%eax movl %edx,4(%rsp) movl %r11d,%ecx xorl 16(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 40(%rsp),%ebp leal 1859775393(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp xorl 12(%rsp),%r14d movl %r11d,%eax movl %ebp,8(%rsp) movl %edi,%ecx xorl 20(%rsp),%r14d xorl %r13d,%eax roll $5,%ecx xorl 44(%rsp),%r14d leal 1859775393(%rbp,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%r14d xorl 16(%rsp),%edx movl %edi,%eax movl %r14d,12(%rsp) movl %esi,%ecx xorl 24(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 48(%rsp),%edx leal 1859775393(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%edx xorl 20(%rsp),%ebp movl %esi,%eax movl %edx,16(%rsp) movl %r13d,%ecx xorl 28(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 52(%rsp),%ebp leal 1859775393(%rdx,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 24(%rsp),%r14d movl %r13d,%eax movl %ebp,20(%rsp) movl %r12d,%ecx xorl 32(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 56(%rsp),%r14d leal 1859775393(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 28(%rsp),%edx movl %r12d,%eax movl %r14d,24(%rsp) movl %r11d,%ecx xorl 36(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 60(%rsp),%edx leal 1859775393(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 32(%rsp),%ebp movl %r11d,%eax movl %edx,28(%rsp) movl %edi,%ecx xorl 40(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 0(%rsp),%ebp leal 1859775393(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 36(%rsp),%r14d movl %r12d,%eax movl %ebp,32(%rsp) movl %r12d,%ebx xorl 44(%rsp),%r14d andl %r11d,%eax movl %esi,%ecx xorl 4(%rsp),%r14d leal -1894007588(%rbp,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl 
%eax,%r13d roll $1,%r14d andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 40(%rsp),%edx movl %r11d,%eax movl %r14d,36(%rsp) movl %r11d,%ebx xorl 48(%rsp),%edx andl %edi,%eax movl %r13d,%ecx xorl 8(%rsp),%edx leal -1894007588(%r14,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%edx andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 44(%rsp),%ebp movl %edi,%eax movl %edx,40(%rsp) movl %edi,%ebx xorl 52(%rsp),%ebp andl %esi,%eax movl %r12d,%ecx xorl 12(%rsp),%ebp leal -1894007588(%rdx,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%ebp andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 48(%rsp),%r14d movl %esi,%eax movl %ebp,44(%rsp) movl %esi,%ebx xorl 56(%rsp),%r14d andl %r13d,%eax movl %r11d,%ecx xorl 16(%rsp),%r14d leal -1894007588(%rbp,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%r14d andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 52(%rsp),%edx movl %r13d,%eax movl %r14d,48(%rsp) movl %r13d,%ebx xorl 60(%rsp),%edx andl %r12d,%eax movl %edi,%ecx xorl 20(%rsp),%edx leal -1894007588(%r14,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%edx andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 56(%rsp),%ebp movl %r12d,%eax movl %edx,52(%rsp) movl %r12d,%ebx xorl 0(%rsp),%ebp andl %r11d,%eax movl %esi,%ecx xorl 24(%rsp),%ebp leal -1894007588(%rdx,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%ebp andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 60(%rsp),%r14d movl %r11d,%eax movl %ebp,56(%rsp) movl %r11d,%ebx xorl 4(%rsp),%r14d andl %edi,%eax movl %r13d,%ecx xorl 28(%rsp),%r14d leal -1894007588(%rbp,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%r14d andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 0(%rsp),%edx movl %edi,%eax movl %r14d,60(%rsp) movl %edi,%ebx xorl 8(%rsp),%edx andl %esi,%eax movl %r12d,%ecx xorl 32(%rsp),%edx leal -1894007588(%r14,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%edx andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 4(%rsp),%ebp movl %esi,%eax movl %edx,0(%rsp) movl %esi,%ebx xorl 12(%rsp),%ebp andl %r13d,%eax movl %r11d,%ecx xorl 36(%rsp),%ebp leal -1894007588(%rdx,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%ebp andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 8(%rsp),%r14d movl %r13d,%eax movl %ebp,4(%rsp) movl %r13d,%ebx xorl 16(%rsp),%r14d andl %r12d,%eax movl %edi,%ecx xorl 40(%rsp),%r14d leal -1894007588(%rbp,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%r14d andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 12(%rsp),%edx movl %r12d,%eax movl %r14d,8(%rsp) movl %r12d,%ebx xorl 20(%rsp),%edx andl %r11d,%eax movl %esi,%ecx xorl 44(%rsp),%edx leal -1894007588(%r14,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%edx andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 16(%rsp),%ebp movl %r11d,%eax movl %edx,12(%rsp) movl %r11d,%ebx xorl 24(%rsp),%ebp andl %edi,%eax movl %r13d,%ecx xorl 48(%rsp),%ebp leal -1894007588(%rdx,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%ebp andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 20(%rsp),%r14d movl %edi,%eax movl %ebp,16(%rsp) movl %edi,%ebx xorl 28(%rsp),%r14d andl %esi,%eax movl %r12d,%ecx xorl 52(%rsp),%r14d leal -1894007588(%rbp,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%r14d andl 
%r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 24(%rsp),%edx movl %esi,%eax movl %r14d,20(%rsp) movl %esi,%ebx xorl 32(%rsp),%edx andl %r13d,%eax movl %r11d,%ecx xorl 56(%rsp),%edx leal -1894007588(%r14,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%edx andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 28(%rsp),%ebp movl %r13d,%eax movl %edx,24(%rsp) movl %r13d,%ebx xorl 36(%rsp),%ebp andl %r12d,%eax movl %edi,%ecx xorl 60(%rsp),%ebp leal -1894007588(%rdx,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%ebp andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 32(%rsp),%r14d movl %r12d,%eax movl %ebp,28(%rsp) movl %r12d,%ebx xorl 40(%rsp),%r14d andl %r11d,%eax movl %esi,%ecx xorl 0(%rsp),%r14d leal -1894007588(%rbp,%r13,1),%r13d xorl %r11d,%ebx roll $5,%ecx addl %eax,%r13d roll $1,%r14d andl %edi,%ebx addl %ecx,%r13d roll $30,%edi addl %ebx,%r13d xorl 36(%rsp),%edx movl %r11d,%eax movl %r14d,32(%rsp) movl %r11d,%ebx xorl 44(%rsp),%edx andl %edi,%eax movl %r13d,%ecx xorl 4(%rsp),%edx leal -1894007588(%r14,%r12,1),%r12d xorl %edi,%ebx roll $5,%ecx addl %eax,%r12d roll $1,%edx andl %esi,%ebx addl %ecx,%r12d roll $30,%esi addl %ebx,%r12d xorl 40(%rsp),%ebp movl %edi,%eax movl %edx,36(%rsp) movl %edi,%ebx xorl 48(%rsp),%ebp andl %esi,%eax movl %r12d,%ecx xorl 8(%rsp),%ebp leal -1894007588(%rdx,%r11,1),%r11d xorl %esi,%ebx roll $5,%ecx addl %eax,%r11d roll $1,%ebp andl %r13d,%ebx addl %ecx,%r11d roll $30,%r13d addl %ebx,%r11d xorl 44(%rsp),%r14d movl %esi,%eax movl %ebp,40(%rsp) movl %esi,%ebx xorl 52(%rsp),%r14d andl %r13d,%eax movl %r11d,%ecx xorl 12(%rsp),%r14d leal -1894007588(%rbp,%rdi,1),%edi xorl %r13d,%ebx roll $5,%ecx addl %eax,%edi roll $1,%r14d andl %r12d,%ebx addl %ecx,%edi roll $30,%r12d addl %ebx,%edi xorl 48(%rsp),%edx movl %r13d,%eax movl %r14d,44(%rsp) movl %r13d,%ebx xorl 56(%rsp),%edx andl %r12d,%eax movl %edi,%ecx xorl 16(%rsp),%edx leal -1894007588(%r14,%rsi,1),%esi xorl %r12d,%ebx roll $5,%ecx addl %eax,%esi roll $1,%edx andl %r11d,%ebx addl %ecx,%esi roll $30,%r11d addl %ebx,%esi xorl 52(%rsp),%ebp movl %edi,%eax movl %edx,48(%rsp) movl %esi,%ecx xorl 60(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 20(%rsp),%ebp leal -899497514(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 56(%rsp),%r14d movl %esi,%eax movl %ebp,52(%rsp) movl %r13d,%ecx xorl 0(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 24(%rsp),%r14d leal -899497514(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 60(%rsp),%edx movl %r13d,%eax movl %r14d,56(%rsp) movl %r12d,%ecx xorl 4(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 28(%rsp),%edx leal -899497514(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 0(%rsp),%ebp movl %r12d,%eax movl %edx,60(%rsp) movl %r11d,%ecx xorl 8(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 32(%rsp),%ebp leal -899497514(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp xorl 4(%rsp),%r14d movl %r11d,%eax movl %ebp,0(%rsp) movl %edi,%ecx xorl 12(%rsp),%r14d xorl %r13d,%eax roll $5,%ecx xorl 36(%rsp),%r14d leal -899497514(%rbp,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%r14d xorl 8(%rsp),%edx movl %edi,%eax movl %r14d,4(%rsp) movl %esi,%ecx xorl 16(%rsp),%edx xorl %r12d,%eax roll $5,%ecx xorl 40(%rsp),%edx leal -899497514(%r14,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi 
addl %eax,%r13d roll $1,%edx xorl 12(%rsp),%ebp movl %esi,%eax movl %edx,8(%rsp) movl %r13d,%ecx xorl 20(%rsp),%ebp xorl %r11d,%eax roll $5,%ecx xorl 44(%rsp),%ebp leal -899497514(%rdx,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%ebp xorl 16(%rsp),%r14d movl %r13d,%eax movl %ebp,12(%rsp) movl %r12d,%ecx xorl 24(%rsp),%r14d xorl %edi,%eax roll $5,%ecx xorl 48(%rsp),%r14d leal -899497514(%rbp,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%r14d xorl 20(%rsp),%edx movl %r12d,%eax movl %r14d,16(%rsp) movl %r11d,%ecx xorl 28(%rsp),%edx xorl %esi,%eax roll $5,%ecx xorl 52(%rsp),%edx leal -899497514(%r14,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%edx xorl 24(%rsp),%ebp movl %r11d,%eax movl %edx,20(%rsp) movl %edi,%ecx xorl 32(%rsp),%ebp xorl %r13d,%eax roll $5,%ecx xorl 56(%rsp),%ebp leal -899497514(%rdx,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%ebp xorl 28(%rsp),%r14d movl %edi,%eax movl %ebp,24(%rsp) movl %esi,%ecx xorl 36(%rsp),%r14d xorl %r12d,%eax roll $5,%ecx xorl 60(%rsp),%r14d leal -899497514(%rbp,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%r14d xorl 32(%rsp),%edx movl %esi,%eax movl %r14d,28(%rsp) movl %r13d,%ecx xorl 40(%rsp),%edx xorl %r11d,%eax roll $5,%ecx xorl 0(%rsp),%edx leal -899497514(%r14,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%edx xorl 36(%rsp),%ebp movl %r13d,%eax movl %r12d,%ecx xorl 44(%rsp),%ebp xorl %edi,%eax roll $5,%ecx xorl 4(%rsp),%ebp leal -899497514(%rdx,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%ebp xorl 40(%rsp),%r14d movl %r12d,%eax movl %r11d,%ecx xorl 48(%rsp),%r14d xorl %esi,%eax roll $5,%ecx xorl 8(%rsp),%r14d leal -899497514(%rbp,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%r14d xorl 44(%rsp),%edx movl %r11d,%eax movl %edi,%ecx xorl 52(%rsp),%edx xorl %r13d,%eax roll $5,%ecx xorl 12(%rsp),%edx leal -899497514(%r14,%rsi,1),%esi xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi roll $1,%edx xorl 48(%rsp),%ebp movl %edi,%eax movl %esi,%ecx xorl 56(%rsp),%ebp xorl %r12d,%eax roll $5,%ecx xorl 16(%rsp),%ebp leal -899497514(%rdx,%r13,1),%r13d xorl %r11d,%eax addl %ecx,%r13d roll $30,%edi addl %eax,%r13d roll $1,%ebp xorl 52(%rsp),%r14d movl %esi,%eax movl %r13d,%ecx xorl 60(%rsp),%r14d xorl %r11d,%eax roll $5,%ecx xorl 20(%rsp),%r14d leal -899497514(%rbp,%r12,1),%r12d xorl %edi,%eax addl %ecx,%r12d roll $30,%esi addl %eax,%r12d roll $1,%r14d xorl 56(%rsp),%edx movl %r13d,%eax movl %r12d,%ecx xorl 0(%rsp),%edx xorl %edi,%eax roll $5,%ecx xorl 24(%rsp),%edx leal -899497514(%r14,%r11,1),%r11d xorl %esi,%eax addl %ecx,%r11d roll $30,%r13d addl %eax,%r11d roll $1,%edx xorl 60(%rsp),%ebp movl %r12d,%eax movl %r11d,%ecx xorl 4(%rsp),%ebp xorl %esi,%eax roll $5,%ecx xorl 28(%rsp),%ebp leal -899497514(%rdx,%rdi,1),%edi xorl %r13d,%eax addl %ecx,%edi roll $30,%r12d addl %eax,%edi roll $1,%ebp movl %r11d,%eax movl %edi,%ecx xorl %r13d,%eax leal -899497514(%rbp,%rsi,1),%esi roll $5,%ecx xorl %r12d,%eax addl %ecx,%esi roll $30,%r11d addl %eax,%esi addl 0(%r8),%esi addl 4(%r8),%edi addl 8(%r8),%r11d addl 12(%r8),%r12d addl 16(%r8),%r13d movl %esi,0(%r8) movl %edi,4(%r8) movl %r11d,8(%r8) movl %r12d,12(%r8) movl %r13d,16(%r8) subq $1,%r10 leaq 64(%r9),%r9 jnz .Lloop movq 64(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -40(%rsi),%r14 .cfi_restore %r14 movq 
-32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue: .byte 0xf3,0xc3 .cfi_endproc .size sha1_block_data_order_nohw,.-sha1_block_data_order_nohw .globl sha1_block_data_order_hw .hidden sha1_block_data_order_hw .type sha1_block_data_order_hw,@function .align 32 sha1_block_data_order_hw: .cfi_startproc _CET_ENDBR movdqu (%rdi),%xmm0 movd 16(%rdi),%xmm1 movdqa K_XX_XX+160(%rip),%xmm3 movdqu (%rsi),%xmm4 pshufd $27,%xmm0,%xmm0 movdqu 16(%rsi),%xmm5 pshufd $27,%xmm1,%xmm1 movdqu 32(%rsi),%xmm6 .byte 102,15,56,0,227 movdqu 48(%rsi),%xmm7 .byte 102,15,56,0,235 .byte 102,15,56,0,243 movdqa %xmm1,%xmm9 .byte 102,15,56,0,251 jmp .Loop_shaext .align 16 .Loop_shaext: decq %rdx leaq 64(%rsi),%r8 paddd %xmm4,%xmm1 cmovneq %r8,%rsi prefetcht0 512(%rsi) movdqa %xmm0,%xmm8 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,0 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,0 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,0 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,1 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,1 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,1 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,2 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 .byte 15,56,201,229 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,213 pxor %xmm6,%xmm4 .byte 15,56,201,238 .byte 15,56,202,231 movdqa %xmm0,%xmm1 .byte 15,58,204,194,2 .byte 15,56,200,206 pxor %xmm7,%xmm5 .byte 15,56,202,236 .byte 15,56,201,247 movdqa %xmm0,%xmm2 .byte 15,58,204,193,2 .byte 15,56,200,215 pxor %xmm4,%xmm6 .byte 15,56,201,252 .byte 15,56,202,245 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 15,56,200,204 pxor %xmm5,%xmm7 .byte 15,56,202,254 movdqu (%rsi),%xmm4 movdqa %xmm0,%xmm2 .byte 15,58,204,193,3 .byte 15,56,200,213 movdqu 16(%rsi),%xmm5 .byte 102,15,56,0,227 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 15,56,200,206 movdqu 32(%rsi),%xmm6 .byte 102,15,56,0,235 movdqa %xmm0,%xmm2 .byte 15,58,204,193,3 .byte 15,56,200,215 movdqu 48(%rsi),%xmm7 .byte 102,15,56,0,243 movdqa %xmm0,%xmm1 .byte 15,58,204,194,3 .byte 65,15,56,200,201 .byte 102,15,56,0,251 paddd %xmm8,%xmm0 movdqa %xmm1,%xmm9 jnz .Loop_shaext pshufd $27,%xmm0,%xmm0 pshufd $27,%xmm1,%xmm1 movdqu %xmm0,(%rdi) movd %xmm1,16(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size sha1_block_data_order_hw,.-sha1_block_data_order_hw .globl sha1_block_data_order_ssse3 .hidden sha1_block_data_order_ssse3 .type 
sha1_block_data_order_ssse3,@function .align 16 sha1_block_data_order_ssse3: .cfi_startproc _CET_ENDBR movq %rsp,%r11 .cfi_def_cfa_register %r11 pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 leaq -64(%rsp),%rsp andq $-64,%rsp movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 shlq $6,%r10 addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax movl 4(%r8),%ebx movl 8(%r8),%ecx movl 12(%r8),%edx movl %ebx,%esi movl 16(%r8),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi movdqa 64(%r14),%xmm6 movdqa -64(%r14),%xmm9 movdqu 0(%r9),%xmm0 movdqu 16(%r9),%xmm1 movdqu 32(%r9),%xmm2 movdqu 48(%r9),%xmm3 .byte 102,15,56,0,198 .byte 102,15,56,0,206 .byte 102,15,56,0,214 addq $64,%r9 paddd %xmm9,%xmm0 .byte 102,15,56,0,222 paddd %xmm9,%xmm1 paddd %xmm9,%xmm2 movdqa %xmm0,0(%rsp) psubd %xmm9,%xmm0 movdqa %xmm1,16(%rsp) psubd %xmm9,%xmm1 movdqa %xmm2,32(%rsp) psubd %xmm9,%xmm2 jmp .Loop_ssse3 .align 16 .Loop_ssse3: rorl $2,%ebx pshufd $238,%xmm0,%xmm4 xorl %edx,%esi movdqa %xmm3,%xmm8 paddd %xmm3,%xmm9 movl %eax,%edi addl 0(%rsp),%ebp punpcklqdq %xmm1,%xmm4 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp psrldq $4,%xmm8 andl %ebx,%edi xorl %ecx,%ebx pxor %xmm0,%xmm4 addl %eax,%ebp rorl $7,%eax pxor %xmm2,%xmm8 xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx pxor %xmm8,%xmm4 xorl %ebx,%eax roll $5,%ebp movdqa %xmm9,48(%rsp) addl %edi,%edx andl %eax,%esi movdqa %xmm4,%xmm10 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp movdqa %xmm4,%xmm8 xorl %ebx,%esi pslldq $12,%xmm10 paddd %xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx psrld $31,%xmm8 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx movdqa %xmm10,%xmm9 andl %ebp,%edi xorl %eax,%ebp psrld $30,%xmm10 addl %edx,%ecx rorl $7,%edx por %xmm8,%xmm4 xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx pslld $2,%xmm9 pxor %xmm10,%xmm4 xorl %ebp,%edx movdqa -64(%r14),%xmm10 roll $5,%ecx addl %edi,%ebx andl %edx,%esi pxor %xmm9,%xmm4 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx pshufd $238,%xmm1,%xmm5 xorl %ebp,%esi movdqa %xmm4,%xmm9 paddd %xmm4,%xmm10 movl %ebx,%edi addl 16(%rsp),%eax punpcklqdq %xmm2,%xmm5 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax psrldq $4,%xmm9 andl %ecx,%edi xorl %edx,%ecx pxor %xmm1,%xmm5 addl %ebx,%eax rorl $7,%ebx pxor %xmm3,%xmm9 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp pxor %xmm9,%xmm5 xorl %ecx,%ebx roll $5,%eax movdqa %xmm10,0(%rsp) addl %edi,%ebp andl %ebx,%esi movdqa %xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax movdqa %xmm5,%xmm9 xorl %ecx,%esi pslldq $12,%xmm8 paddd %xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx psrld $31,%xmm9 xorl %ebx,%eax roll $5,%ebp addl %esi,%edx movdqa %xmm8,%xmm10 andl %eax,%edi xorl %ebx,%eax psrld $30,%xmm8 addl %ebp,%edx rorl $7,%ebp por %xmm9,%xmm5 xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx pslld $2,%xmm10 pxor %xmm8,%xmm5 xorl %eax,%ebp movdqa -32(%r14),%xmm8 roll $5,%edx addl %edi,%ecx andl %ebp,%esi pxor %xmm10,%xmm5 xorl %eax,%ebp addl %edx,%ecx rorl $7,%edx pshufd $238,%xmm2,%xmm6 xorl %eax,%esi movdqa %xmm5,%xmm10 paddd %xmm5,%xmm8 movl %ecx,%edi addl 32(%rsp),%ebx punpcklqdq %xmm3,%xmm6 xorl %ebp,%edx roll $5,%ecx addl %esi,%ebx psrldq $4,%xmm10 andl %edx,%edi xorl %ebp,%edx pxor %xmm2,%xmm6 addl %ecx,%ebx rorl $7,%ecx pxor %xmm4,%xmm10 xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax pxor %xmm10,%xmm6 xorl %edx,%ecx roll $5,%ebx movdqa %xmm8,16(%rsp) addl %edi,%eax andl %ecx,%esi movdqa %xmm6,%xmm9 xorl %edx,%ecx addl %ebx,%eax rorl $7,%ebx movdqa %xmm6,%xmm10 xorl %edx,%esi pslldq 
$12,%xmm9 paddd %xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp psrld $31,%xmm10 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp movdqa %xmm9,%xmm8 andl %ebx,%edi xorl %ecx,%ebx psrld $30,%xmm9 addl %eax,%ebp rorl $7,%eax por %xmm10,%xmm6 xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx pslld $2,%xmm8 pxor %xmm9,%xmm6 xorl %ebx,%eax movdqa -32(%r14),%xmm9 roll $5,%ebp addl %edi,%edx andl %eax,%esi pxor %xmm8,%xmm6 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp pshufd $238,%xmm3,%xmm7 xorl %ebx,%esi movdqa %xmm6,%xmm8 paddd %xmm6,%xmm9 movl %edx,%edi addl 48(%rsp),%ecx punpcklqdq %xmm4,%xmm7 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx psrldq $4,%xmm8 andl %ebp,%edi xorl %eax,%ebp pxor %xmm3,%xmm7 addl %edx,%ecx rorl $7,%edx pxor %xmm5,%xmm8 xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx pxor %xmm8,%xmm7 xorl %ebp,%edx roll $5,%ecx movdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi movdqa %xmm7,%xmm10 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx movdqa %xmm7,%xmm8 xorl %ebp,%esi pslldq $12,%xmm10 paddd %xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax psrld $31,%xmm8 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax movdqa %xmm10,%xmm9 andl %ecx,%edi xorl %edx,%ecx psrld $30,%xmm10 addl %ebx,%eax rorl $7,%ebx por %xmm8,%xmm7 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp pslld $2,%xmm9 pxor %xmm10,%xmm7 xorl %ecx,%ebx movdqa -32(%r14),%xmm10 roll $5,%eax addl %edi,%ebp andl %ebx,%esi pxor %xmm9,%xmm7 pshufd $238,%xmm6,%xmm9 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax pxor %xmm4,%xmm0 xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx punpcklqdq %xmm7,%xmm9 xorl %ebx,%eax roll $5,%ebp pxor %xmm1,%xmm0 addl %esi,%edx andl %eax,%edi movdqa %xmm10,%xmm8 xorl %ebx,%eax paddd %xmm7,%xmm10 addl %ebp,%edx pxor %xmm9,%xmm0 rorl $7,%ebp xorl %ebx,%edi movl %edx,%esi addl 4(%rsp),%ecx movdqa %xmm0,%xmm9 xorl %eax,%ebp roll $5,%edx movdqa %xmm10,48(%rsp) addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp pslld $2,%xmm0 addl %edx,%ecx rorl $7,%edx psrld $30,%xmm9 xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx por %xmm9,%xmm0 xorl %ebp,%edx roll $5,%ecx pshufd $238,%xmm7,%xmm10 addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax pxor %xmm5,%xmm1 addl 16(%rsp),%ebp xorl %ecx,%esi punpcklqdq %xmm0,%xmm10 movl %eax,%edi roll $5,%eax pxor %xmm2,%xmm1 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm8,%xmm9 rorl $7,%ebx paddd %xmm0,%xmm8 addl %eax,%ebp pxor %xmm10,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm1,%xmm10 addl %edi,%edx xorl %ebx,%esi movdqa %xmm8,0(%rsp) rorl $7,%eax addl %ebp,%edx addl 24(%rsp),%ecx pslld $2,%xmm1 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm10 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp por %xmm10,%xmm1 addl %edx,%ecx addl 28(%rsp),%ebx pshufd $238,%xmm0,%xmm8 xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx pxor %xmm6,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi punpcklqdq %xmm1,%xmm8 movl %ebx,%edi roll $5,%ebx pxor %xmm3,%xmm2 addl %esi,%eax xorl %edx,%edi movdqa 0(%r14),%xmm10 rorl $7,%ecx paddd %xmm1,%xmm9 addl %ebx,%eax pxor %xmm8,%xmm2 addl 36(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax movdqa %xmm2,%xmm8 addl %edi,%ebp xorl %ecx,%esi movdqa %xmm9,16(%rsp) rorl $7,%ebx addl %eax,%ebp addl 40(%rsp),%edx pslld $2,%xmm2 xorl %ebx,%esi movl %ebp,%edi psrld $30,%xmm8 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax por %xmm8,%xmm2 addl %ebp,%edx addl 44(%rsp),%ecx pshufd 
$238,%xmm1,%xmm9 xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx pxor %xmm7,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi punpcklqdq %xmm2,%xmm9 movl %ecx,%edi roll $5,%ecx pxor %xmm4,%xmm3 addl %esi,%ebx xorl %ebp,%edi movdqa %xmm10,%xmm8 rorl $7,%edx paddd %xmm2,%xmm10 addl %ecx,%ebx pxor %xmm9,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx movdqa %xmm3,%xmm9 addl %edi,%eax xorl %edx,%esi movdqa %xmm10,32(%rsp) rorl $7,%ecx addl %ebx,%eax addl 56(%rsp),%ebp pslld $2,%xmm3 xorl %ecx,%esi movl %eax,%edi psrld $30,%xmm9 roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx por %xmm9,%xmm3 addl %eax,%ebp addl 60(%rsp),%edx pshufd $238,%xmm2,%xmm10 xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx pxor %xmm0,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi punpcklqdq %xmm3,%xmm10 movl %edx,%edi roll $5,%edx pxor %xmm5,%xmm4 addl %esi,%ecx xorl %eax,%edi movdqa %xmm8,%xmm9 rorl $7,%ebp paddd %xmm3,%xmm8 addl %edx,%ecx pxor %xmm10,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx movdqa %xmm4,%xmm10 addl %edi,%ebx xorl %ebp,%esi movdqa %xmm8,48(%rsp) rorl $7,%edx addl %ecx,%ebx addl 8(%rsp),%eax pslld $2,%xmm4 xorl %edx,%esi movl %ebx,%edi psrld $30,%xmm10 roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx por %xmm10,%xmm4 addl %ebx,%eax addl 12(%rsp),%ebp pshufd $238,%xmm3,%xmm8 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp pxor %xmm1,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi punpcklqdq %xmm4,%xmm8 movl %ebp,%edi roll $5,%ebp pxor %xmm6,%xmm5 addl %esi,%edx xorl %ebx,%edi movdqa %xmm9,%xmm10 rorl $7,%eax paddd %xmm4,%xmm9 addl %ebp,%edx pxor %xmm8,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx movdqa %xmm5,%xmm8 addl %edi,%ecx xorl %eax,%esi movdqa %xmm9,0(%rsp) rorl $7,%ebp addl %edx,%ecx addl 24(%rsp),%ebx pslld $2,%xmm5 xorl %ebp,%esi movl %ecx,%edi psrld $30,%xmm8 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx por %xmm8,%xmm5 addl %ecx,%ebx addl 28(%rsp),%eax pshufd $238,%xmm4,%xmm9 rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax pxor %xmm2,%xmm6 addl 32(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx punpcklqdq %xmm5,%xmm9 movl %eax,%edi xorl %ecx,%esi pxor %xmm7,%xmm6 roll $5,%eax addl %esi,%ebp movdqa %xmm10,%xmm8 xorl %ebx,%edi paddd %xmm5,%xmm10 xorl %ecx,%ebx pxor %xmm9,%xmm6 addl %eax,%ebp addl 36(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movdqa %xmm6,%xmm9 movl %ebp,%esi xorl %ebx,%edi movdqa %xmm10,16(%rsp) roll $5,%ebp addl %edi,%edx xorl %eax,%esi pslld $2,%xmm6 xorl %ebx,%eax addl %ebp,%edx psrld $30,%xmm9 addl 40(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax por %xmm9,%xmm6 rorl $7,%ebp movl %edx,%edi xorl %eax,%esi roll $5,%edx pshufd $238,%xmm5,%xmm10 addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movl %ecx,%esi xorl %ebp,%edi roll $5,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx pxor %xmm3,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx rorl $7,%ecx punpcklqdq %xmm6,%xmm10 movl %ebx,%edi xorl %edx,%esi pxor %xmm0,%xmm7 roll $5,%ebx addl %esi,%eax movdqa 32(%r14),%xmm9 xorl %ecx,%edi paddd %xmm6,%xmm8 xorl %edx,%ecx pxor %xmm10,%xmm7 addl %ebx,%eax addl 52(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movdqa %xmm7,%xmm10 movl %eax,%esi xorl %ecx,%edi movdqa %xmm8,32(%rsp) roll 
$5,%eax addl %edi,%ebp xorl %ebx,%esi pslld $2,%xmm7 xorl %ecx,%ebx addl %eax,%ebp psrld $30,%xmm10 addl 56(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx por %xmm10,%xmm7 rorl $7,%eax movl %ebp,%edi xorl %ebx,%esi roll $5,%ebp pshufd $238,%xmm6,%xmm8 addl %esi,%edx xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movl %edx,%esi xorl %eax,%edi roll $5,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx pxor %xmm4,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp rorl $7,%edx punpcklqdq %xmm7,%xmm8 movl %ecx,%edi xorl %ebp,%esi pxor %xmm1,%xmm0 roll $5,%ecx addl %esi,%ebx movdqa %xmm9,%xmm10 xorl %edx,%edi paddd %xmm7,%xmm9 xorl %ebp,%edx pxor %xmm8,%xmm0 addl %ecx,%ebx addl 4(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movdqa %xmm0,%xmm8 movl %ebx,%esi xorl %edx,%edi movdqa %xmm9,48(%rsp) roll $5,%ebx addl %edi,%eax xorl %ecx,%esi pslld $2,%xmm0 xorl %edx,%ecx addl %ebx,%eax psrld $30,%xmm8 addl 8(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx por %xmm8,%xmm0 rorl $7,%ebx movl %eax,%edi xorl %ecx,%esi roll $5,%eax pshufd $238,%xmm7,%xmm9 addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movl %ebp,%esi xorl %ebx,%edi roll $5,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx pxor %xmm5,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax rorl $7,%ebp punpcklqdq %xmm0,%xmm9 movl %edx,%edi xorl %eax,%esi pxor %xmm2,%xmm1 roll $5,%edx addl %esi,%ecx movdqa %xmm10,%xmm8 xorl %ebp,%edi paddd %xmm0,%xmm10 xorl %eax,%ebp pxor %xmm9,%xmm1 addl %edx,%ecx addl 20(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movdqa %xmm1,%xmm9 movl %ecx,%esi xorl %ebp,%edi movdqa %xmm10,0(%rsp) roll $5,%ecx addl %edi,%ebx xorl %edx,%esi pslld $2,%xmm1 xorl %ebp,%edx addl %ecx,%ebx psrld $30,%xmm9 addl 24(%rsp),%eax andl %edx,%esi xorl %ebp,%edx por %xmm9,%xmm1 rorl $7,%ecx movl %ebx,%edi xorl %edx,%esi roll $5,%ebx pshufd $238,%xmm0,%xmm10 addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%edi roll $5,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp pxor %xmm6,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax punpcklqdq %xmm1,%xmm10 movl %ebp,%edi xorl %ebx,%esi pxor %xmm3,%xmm2 roll $5,%ebp addl %esi,%edx movdqa %xmm8,%xmm9 xorl %eax,%edi paddd %xmm1,%xmm8 xorl %ebx,%eax pxor %xmm10,%xmm2 addl %ebp,%edx addl 36(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movdqa %xmm2,%xmm10 movl %edx,%esi xorl %eax,%edi movdqa %xmm8,16(%rsp) roll $5,%edx addl %edi,%ecx xorl %ebp,%esi pslld $2,%xmm2 xorl %eax,%ebp addl %edx,%ecx psrld $30,%xmm10 addl 40(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp por %xmm10,%xmm2 rorl $7,%edx movl %ecx,%edi xorl %ebp,%esi roll $5,%ecx pshufd $238,%xmm1,%xmm8 addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax pxor %xmm7,%xmm3 addl 48(%rsp),%ebp xorl %ecx,%esi punpcklqdq %xmm2,%xmm8 movl %eax,%edi roll $5,%eax pxor %xmm4,%xmm3 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm9,%xmm10 rorl $7,%ebx paddd %xmm2,%xmm9 addl %eax,%ebp pxor %xmm8,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm3,%xmm8 addl %edi,%edx xorl %ebx,%esi movdqa %xmm9,32(%rsp) rorl $7,%eax addl %ebp,%edx addl 56(%rsp),%ecx pslld $2,%xmm3 xorl %eax,%esi 
movl %edx,%edi psrld $30,%xmm8 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp por %xmm8,%xmm3 addl %edx,%ecx addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 0(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx paddd %xmm3,%xmm10 addl %esi,%eax xorl %edx,%edi movdqa %xmm10,48(%rsp) rorl $7,%ecx addl %ebx,%eax addl 4(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx cmpq %r10,%r9 je .Ldone_ssse3 movdqa 64(%r14),%xmm6 movdqa -64(%r14),%xmm9 movdqu 0(%r9),%xmm0 movdqu 16(%r9),%xmm1 movdqu 32(%r9),%xmm2 movdqu 48(%r9),%xmm3 .byte 102,15,56,0,198 addq $64,%r9 addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi .byte 102,15,56,0,206 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx paddd %xmm9,%xmm0 addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi movdqa %xmm0,0(%rsp) roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx psubd %xmm9,%xmm0 addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi .byte 102,15,56,0,214 roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp paddd %xmm9,%xmm1 addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi movdqa %xmm1,16(%rsp) roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx psubd %xmm9,%xmm1 addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi .byte 102,15,56,0,222 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax paddd %xmm9,%xmm2 addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi movdqa %xmm2,32(%rsp) roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp psubd %xmm9,%xmm2 addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx addl 12(%r8),%edx movl %eax,0(%r8) addl 16(%r8),%ebp movl %esi,4(%r8) movl %esi,%ebx movl %ecx,8(%r8) movl %ecx,%edi movl %edx,12(%r8) xorl %edx,%edi movl %ebp,16(%r8) andl %edi,%esi jmp .Loop_ssse3 .align 16 .Ldone_ssse3: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi roll $5,%edx addl %esi,%ecx xorl %eax,%edi rorl $7,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl 
%edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx movl %eax,0(%r8) addl 12(%r8),%edx movl %esi,4(%r8) addl 16(%r8),%ebp movl %ecx,8(%r8) movl %edx,12(%r8) movl %ebp,16(%r8) movq -40(%r11),%r14 .cfi_restore %r14 movq -32(%r11),%r13 .cfi_restore %r13 movq -24(%r11),%r12 .cfi_restore %r12 movq -16(%r11),%rbp .cfi_restore %rbp movq -8(%r11),%rbx .cfi_restore %rbx leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lepilogue_ssse3: .byte 0xf3,0xc3 .cfi_endproc .size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3 .globl sha1_block_data_order_avx .hidden sha1_block_data_order_avx .type sha1_block_data_order_avx,@function .align 16 sha1_block_data_order_avx: .cfi_startproc _CET_ENDBR movq %rsp,%r11 .cfi_def_cfa_register %r11 pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 leaq -64(%rsp),%rsp vzeroupper andq $-64,%rsp movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 shlq $6,%r10 addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax movl 4(%r8),%ebx movl 8(%r8),%ecx movl 12(%r8),%edx movl %ebx,%esi movl 16(%r8),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi vmovdqa 64(%r14),%xmm6 vmovdqa -64(%r14),%xmm11 vmovdqu 0(%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r9 vpshufb %xmm6,%xmm1,%xmm1 vpshufb %xmm6,%xmm2,%xmm2 vpshufb %xmm6,%xmm3,%xmm3 vpaddd %xmm11,%xmm0,%xmm4 vpaddd %xmm11,%xmm1,%xmm5 vpaddd %xmm11,%xmm2,%xmm6 vmovdqa %xmm4,0(%rsp) vmovdqa %xmm5,16(%rsp) vmovdqa %xmm6,32(%rsp) jmp .Loop_avx .align 16 .Loop_avx: shrdl $2,%ebx,%ebx xorl %edx,%esi vpalignr $8,%xmm0,%xmm1,%xmm4 movl %eax,%edi addl 0(%rsp),%ebp vpaddd %xmm3,%xmm11,%xmm9 xorl %ecx,%ebx shldl $5,%eax,%eax vpsrldq $4,%xmm3,%xmm8 addl %esi,%ebp andl %ebx,%edi vpxor %xmm0,%xmm4,%xmm4 xorl %ecx,%ebx addl %eax,%ebp vpxor %xmm2,%xmm8,%xmm8 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx vpxor %xmm8,%xmm4,%xmm4 xorl %ebx,%eax shldl $5,%ebp,%ebp vmovdqa %xmm9,48(%rsp) addl %edi,%edx andl %eax,%esi vpsrld $31,%xmm4,%xmm8 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpslldq $12,%xmm4,%xmm10 vpaddd %xmm4,%xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm4,%xmm4 addl %esi,%ecx andl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm4,%xmm4 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx vpxor %xmm10,%xmm4,%xmm4 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %edi,%ebx andl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpalignr $8,%xmm1,%xmm2,%xmm5 movl %ebx,%edi addl 16(%rsp),%eax vpaddd %xmm4,%xmm11,%xmm9 xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrldq 
$4,%xmm4,%xmm8 addl %esi,%eax andl %ecx,%edi vpxor %xmm1,%xmm5,%xmm5 xorl %edx,%ecx addl %ebx,%eax vpxor %xmm3,%xmm8,%xmm8 shrdl $7,%ebx,%ebx xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp vpxor %xmm8,%xmm5,%xmm5 xorl %ecx,%ebx shldl $5,%eax,%eax vmovdqa %xmm9,0(%rsp) addl %edi,%ebp andl %ebx,%esi vpsrld $31,%xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp shrdl $7,%eax,%eax xorl %ecx,%esi vpslldq $12,%xmm5,%xmm10 vpaddd %xmm5,%xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx xorl %ebx,%eax shldl $5,%ebp,%ebp vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm5,%xmm5 addl %esi,%edx andl %eax,%edi xorl %ebx,%eax addl %ebp,%edx vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm5,%xmm5 shrdl $7,%ebp,%ebp xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx vpxor %xmm10,%xmm5,%xmm5 xorl %eax,%ebp shldl $5,%edx,%edx vmovdqa -32(%r14),%xmm11 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi vpalignr $8,%xmm2,%xmm3,%xmm6 movl %ecx,%edi addl 32(%rsp),%ebx vpaddd %xmm5,%xmm11,%xmm9 xorl %ebp,%edx shldl $5,%ecx,%ecx vpsrldq $4,%xmm5,%xmm8 addl %esi,%ebx andl %edx,%edi vpxor %xmm2,%xmm6,%xmm6 xorl %ebp,%edx addl %ecx,%ebx vpxor %xmm4,%xmm8,%xmm8 shrdl $7,%ecx,%ecx xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax vpxor %xmm8,%xmm6,%xmm6 xorl %edx,%ecx shldl $5,%ebx,%ebx vmovdqa %xmm9,16(%rsp) addl %edi,%eax andl %ecx,%esi vpsrld $31,%xmm6,%xmm8 xorl %edx,%ecx addl %ebx,%eax shrdl $7,%ebx,%ebx xorl %edx,%esi vpslldq $12,%xmm6,%xmm10 vpaddd %xmm6,%xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp xorl %ecx,%ebx shldl $5,%eax,%eax vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm6,%xmm6 addl %esi,%ebp andl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm6,%xmm6 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx vpxor %xmm10,%xmm6,%xmm6 xorl %ebx,%eax shldl $5,%ebp,%ebp addl %edi,%edx andl %eax,%esi xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpalignr $8,%xmm3,%xmm4,%xmm7 movl %edx,%edi addl 48(%rsp),%ecx vpaddd %xmm6,%xmm11,%xmm9 xorl %eax,%ebp shldl $5,%edx,%edx vpsrldq $4,%xmm6,%xmm8 addl %esi,%ecx andl %ebp,%edi vpxor %xmm3,%xmm7,%xmm7 xorl %eax,%ebp addl %edx,%ecx vpxor %xmm5,%xmm8,%xmm8 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx vpxor %xmm8,%xmm7,%xmm7 xorl %ebp,%edx shldl $5,%ecx,%ecx vmovdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi vpsrld $31,%xmm7,%xmm8 xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpslldq $12,%xmm7,%xmm10 vpaddd %xmm7,%xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrld $30,%xmm10,%xmm9 vpor %xmm8,%xmm7,%xmm7 addl %esi,%eax andl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax vpslld $2,%xmm10,%xmm10 vpxor %xmm9,%xmm7,%xmm7 shrdl $7,%ebx,%ebx xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp vpxor %xmm10,%xmm7,%xmm7 xorl %ecx,%ebx shldl $5,%eax,%eax addl %edi,%ebp andl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx vpxor %xmm1,%xmm0,%xmm0 xorl %ebx,%eax shldl $5,%ebp,%ebp vpaddd %xmm7,%xmm11,%xmm9 addl %esi,%edx andl %eax,%edi vpxor %xmm8,%xmm0,%xmm0 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%edi vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) movl %edx,%esi addl 4(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpslld $2,%xmm0,%xmm0 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx vpor %xmm8,%xmm0,%xmm0 xorl %ebp,%edx shldl $5,%ecx,%ecx addl 
%esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm2,%xmm1,%xmm1 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm0,%xmm11,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm1,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm1,%xmm1 addl 24(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm1,%xmm1 addl 28(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx vpxor %xmm3,%xmm2,%xmm2 addl %esi,%eax xorl %edx,%edi vpaddd %xmm1,%xmm11,%xmm9 vmovdqa 0(%r14),%xmm11 shrdl $7,%ecx,%ecx addl %ebx,%eax vpxor %xmm8,%xmm2,%xmm2 addl 36(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpslld $2,%xmm2,%xmm2 addl 40(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vpor %xmm8,%xmm2,%xmm2 addl 44(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebx xorl %ebp,%edi vpaddd %xmm2,%xmm11,%xmm9 shrdl $7,%edx,%edx addl %ecx,%ebx vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpalignr $8,%xmm2,%xmm3,%xmm8 vpxor %xmm0,%xmm4,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx vpxor %xmm5,%xmm4,%xmm4 addl %esi,%ecx xorl %eax,%edi vpaddd %xmm3,%xmm11,%xmm9 shrdl $7,%ebp,%ebp addl %edx,%ecx vpxor %xmm8,%xmm4,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx vpsrld $30,%xmm4,%xmm8 vmovdqa %xmm9,48(%rsp) addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpslld $2,%xmm4,%xmm4 addl 8(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax vpor %xmm8,%xmm4,%xmm4 addl 12(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpalignr $8,%xmm3,%xmm4,%xmm8 vpxor %xmm1,%xmm5,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp vpxor %xmm6,%xmm5,%xmm5 addl %esi,%edx xorl %ebx,%edi vpaddd %xmm4,%xmm11,%xmm9 shrdl $7,%eax,%eax addl %ebp,%edx vpxor %xmm8,%xmm5,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx vpsrld $30,%xmm5,%xmm8 vmovdqa 
%xmm9,0(%rsp) addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpslld $2,%xmm5,%xmm5 addl 24(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vpor %xmm8,%xmm5,%xmm5 addl 28(%rsp),%eax shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax vpalignr $8,%xmm4,%xmm5,%xmm8 vpxor %xmm2,%xmm6,%xmm6 addl 32(%rsp),%ebp andl %ecx,%esi xorl %edx,%ecx shrdl $7,%ebx,%ebx vpxor %xmm7,%xmm6,%xmm6 movl %eax,%edi xorl %ecx,%esi vpaddd %xmm5,%xmm11,%xmm9 shldl $5,%eax,%eax addl %esi,%ebp vpxor %xmm8,%xmm6,%xmm6 xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 36(%rsp),%edx vpsrld $30,%xmm6,%xmm8 vmovdqa %xmm9,16(%rsp) andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi vpslld $2,%xmm6,%xmm6 xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx addl 40(%rsp),%ecx andl %eax,%esi vpor %xmm8,%xmm6,%xmm6 xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%edi xorl %eax,%esi shldl $5,%edx,%edx addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%esi xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx vpalignr $8,%xmm5,%xmm6,%xmm8 vpxor %xmm3,%xmm7,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx shrdl $7,%ecx,%ecx vpxor %xmm0,%xmm7,%xmm7 movl %ebx,%edi xorl %edx,%esi vpaddd %xmm6,%xmm11,%xmm9 vmovdqa 32(%r14),%xmm11 shldl $5,%ebx,%ebx addl %esi,%eax vpxor %xmm8,%xmm7,%xmm7 xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 52(%rsp),%ebp vpsrld $30,%xmm7,%xmm8 vmovdqa %xmm9,32(%rsp) andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi vpslld $2,%xmm7,%xmm7 xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp addl 56(%rsp),%edx andl %ebx,%esi vpor %xmm8,%xmm7,%xmm7 xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%edi xorl %ebx,%esi shldl $5,%ebp,%ebp addl %esi,%edx xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp shrdl $7,%edx,%edx vpxor %xmm1,%xmm0,%xmm0 movl %ecx,%edi xorl %ebp,%esi vpaddd %xmm7,%xmm11,%xmm9 shldl $5,%ecx,%ecx addl %esi,%ebx vpxor %xmm8,%xmm0,%xmm0 xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 4(%rsp),%eax vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi vpslld $2,%xmm0,%xmm0 xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 8(%rsp),%ebp andl %ecx,%esi vpor %xmm8,%xmm0,%xmm0 xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%edi xorl %ecx,%esi shldl $5,%eax,%eax addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax shrdl $7,%ebp,%ebp vpxor %xmm2,%xmm1,%xmm1 movl %edx,%edi xorl %eax,%esi vpaddd %xmm0,%xmm11,%xmm9 shldl $5,%edx,%edx addl %esi,%ecx vpxor %xmm8,%xmm1,%xmm1 xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 20(%rsp),%ebx vpsrld $30,%xmm1,%xmm8 
vmovdqa %xmm9,0(%rsp) andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%esi vpslld $2,%xmm1,%xmm1 xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx addl 24(%rsp),%eax andl %edx,%esi vpor %xmm8,%xmm1,%xmm1 xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%edi xorl %edx,%esi shldl $5,%ebx,%ebx addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx shrdl $7,%eax,%eax vpxor %xmm3,%xmm2,%xmm2 movl %ebp,%edi xorl %ebx,%esi vpaddd %xmm1,%xmm11,%xmm9 shldl $5,%ebp,%ebp addl %esi,%edx vpxor %xmm8,%xmm2,%xmm2 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 36(%rsp),%ecx vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi vpslld $2,%xmm2,%xmm2 xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx addl 40(%rsp),%ebx andl %ebp,%esi vpor %xmm8,%xmm2,%xmm2 xorl %eax,%ebp shrdl $7,%edx,%edx movl %ecx,%edi xorl %ebp,%esi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm2,%xmm11,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 0(%rsp),%eax vpaddd %xmm3,%xmm11,%xmm9 xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax vmovdqa %xmm9,48(%rsp) xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 4(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx cmpq %r10,%r9 je .Ldone_avx vmovdqa 64(%r14),%xmm6 vmovdqa -64(%r14),%xmm11 vmovdqu 0(%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r9 addl 16(%rsp),%ebx xorl %ebp,%esi vpshufb %xmm6,%xmm1,%xmm1 movl %ecx,%edi shldl $5,%ecx,%ecx vpaddd %xmm11,%xmm0,%xmm4 addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vmovdqa %xmm4,0(%rsp) addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp 
addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi vpshufb %xmm6,%xmm2,%xmm2 movl %edx,%edi shldl $5,%edx,%edx vpaddd %xmm11,%xmm1,%xmm5 addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vmovdqa %xmm5,16(%rsp) addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi vpshufb %xmm6,%xmm3,%xmm3 movl %ebp,%edi shldl $5,%ebp,%ebp vpaddd %xmm11,%xmm2,%xmm6 addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vmovdqa %xmm6,32(%rsp) addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx addl 12(%r8),%edx movl %eax,0(%r8) addl 16(%r8),%ebp movl %esi,4(%r8) movl %esi,%ebx movl %ecx,8(%r8) movl %ecx,%edi movl %edx,12(%r8) xorl %edx,%edi movl %ebp,16(%r8) andl %edi,%esi jmp .Loop_avx .align 16 .Ldone_avx: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vzeroupper addl 0(%r8),%eax addl 4(%r8),%esi addl 8(%r8),%ecx movl %eax,0(%r8) addl 12(%r8),%edx movl %esi,4(%r8) addl 16(%r8),%ebp movl %ecx,8(%r8) movl %edx,12(%r8) movl %ebp,16(%r8) movq -40(%r11),%r14 .cfi_restore %r14 movq -32(%r11),%r13 .cfi_restore %r13 movq -24(%r11),%r12 .cfi_restore %r12 movq -16(%r11),%rbp .cfi_restore %rbp movq -8(%r11),%rbx .cfi_restore %rbx leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lepilogue_avx: .byte 0xf3,0xc3 
.cfi_endproc .size sha1_block_data_order_avx,.-sha1_block_data_order_avx .globl sha1_block_data_order_avx2 .hidden sha1_block_data_order_avx2 .type sha1_block_data_order_avx2,@function .align 16 sha1_block_data_order_avx2: .cfi_startproc _CET_ENDBR movq %rsp,%r11 .cfi_def_cfa_register %r11 pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 vzeroupper movq %rdi,%r8 movq %rsi,%r9 movq %rdx,%r10 leaq -640(%rsp),%rsp shlq $6,%r10 leaq 64(%r9),%r13 andq $-128,%rsp addq %r9,%r10 leaq K_XX_XX+64(%rip),%r14 movl 0(%r8),%eax cmpq %r10,%r13 cmovaeq %r9,%r13 movl 4(%r8),%ebp movl 8(%r8),%ecx movl 12(%r8),%edx movl 16(%r8),%esi vmovdqu 64(%r14),%ymm6 vmovdqu (%r9),%xmm0 vmovdqu 16(%r9),%xmm1 vmovdqu 32(%r9),%xmm2 vmovdqu 48(%r9),%xmm3 leaq 64(%r9),%r9 vinserti128 $1,(%r13),%ymm0,%ymm0 vinserti128 $1,16(%r13),%ymm1,%ymm1 vpshufb %ymm6,%ymm0,%ymm0 vinserti128 $1,32(%r13),%ymm2,%ymm2 vpshufb %ymm6,%ymm1,%ymm1 vinserti128 $1,48(%r13),%ymm3,%ymm3 vpshufb %ymm6,%ymm2,%ymm2 vmovdqu -64(%r14),%ymm11 vpshufb %ymm6,%ymm3,%ymm3 vpaddd %ymm11,%ymm0,%ymm4 vpaddd %ymm11,%ymm1,%ymm5 vmovdqu %ymm4,0(%rsp) vpaddd %ymm11,%ymm2,%ymm6 vmovdqu %ymm5,32(%rsp) vpaddd %ymm11,%ymm3,%ymm7 vmovdqu %ymm6,64(%rsp) vmovdqu %ymm7,96(%rsp) vpalignr $8,%ymm0,%ymm1,%ymm4 vpsrldq $4,%ymm3,%ymm8 vpxor %ymm0,%ymm4,%ymm4 vpxor %ymm2,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $31,%ymm4,%ymm8 vpslldq $12,%ymm4,%ymm10 vpaddd %ymm4,%ymm4,%ymm4 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm4,%ymm4 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm4,%ymm4 vpxor %ymm10,%ymm4,%ymm4 vpaddd %ymm11,%ymm4,%ymm9 vmovdqu %ymm9,128(%rsp) vpalignr $8,%ymm1,%ymm2,%ymm5 vpsrldq $4,%ymm4,%ymm8 vpxor %ymm1,%ymm5,%ymm5 vpxor %ymm3,%ymm8,%ymm8 vpxor %ymm8,%ymm5,%ymm5 vpsrld $31,%ymm5,%ymm8 vmovdqu -32(%r14),%ymm11 vpslldq $12,%ymm5,%ymm10 vpaddd %ymm5,%ymm5,%ymm5 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm5,%ymm5 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm10,%ymm5,%ymm5 vpaddd %ymm11,%ymm5,%ymm9 vmovdqu %ymm9,160(%rsp) vpalignr $8,%ymm2,%ymm3,%ymm6 vpsrldq $4,%ymm5,%ymm8 vpxor %ymm2,%ymm6,%ymm6 vpxor %ymm4,%ymm8,%ymm8 vpxor %ymm8,%ymm6,%ymm6 vpsrld $31,%ymm6,%ymm8 vpslldq $12,%ymm6,%ymm10 vpaddd %ymm6,%ymm6,%ymm6 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm6,%ymm6 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm6,%ymm6 vpxor %ymm10,%ymm6,%ymm6 vpaddd %ymm11,%ymm6,%ymm9 vmovdqu %ymm9,192(%rsp) vpalignr $8,%ymm3,%ymm4,%ymm7 vpsrldq $4,%ymm6,%ymm8 vpxor %ymm3,%ymm7,%ymm7 vpxor %ymm5,%ymm8,%ymm8 vpxor %ymm8,%ymm7,%ymm7 vpsrld $31,%ymm7,%ymm8 vpslldq $12,%ymm7,%ymm10 vpaddd %ymm7,%ymm7,%ymm7 vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm7,%ymm7 vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm7,%ymm7 vpxor %ymm10,%ymm7,%ymm7 vpaddd %ymm11,%ymm7,%ymm9 vmovdqu %ymm9,224(%rsp) leaq 128(%rsp),%r13 jmp .Loop_avx2 .align 32 .Loop_avx2: rorxl $2,%ebp,%ebx andnl %edx,%ebp,%edi andl %ecx,%ebp xorl %edi,%ebp jmp .Lalign32_1 .align 32 .Lalign32_1: vpalignr $8,%ymm6,%ymm7,%ymm8 vpxor %ymm4,%ymm0,%ymm0 addl -128(%r13),%esi andnl %ecx,%eax,%edi vpxor %ymm1,%ymm0,%ymm0 addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpxor %ymm8,%ymm0,%ymm0 andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax vpsrld $30,%ymm0,%ymm8 vpslld $2,%ymm0,%ymm0 addl -124(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi vpor %ymm8,%ymm0,%ymm0 addl %r12d,%edx xorl %edi,%esi addl -120(%r13),%ecx andnl %ebp,%edx,%edi vpaddd %ymm11,%ymm0,%ymm9 addl %esi,%ecx rorxl $27,%edx,%r12d rorxl 
$2,%edx,%esi andl %eax,%edx vmovdqu %ymm9,256(%rsp) addl %r12d,%ecx xorl %edi,%edx addl -116(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -96(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx vpalignr $8,%ymm7,%ymm0,%ymm8 vpxor %ymm5,%ymm1,%ymm1 addl -92(%r13),%eax andnl %edx,%ebp,%edi vpxor %ymm2,%ymm1,%ymm1 addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx vpxor %ymm8,%ymm1,%ymm1 andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp vpsrld $30,%ymm1,%ymm8 vpslld $2,%ymm1,%ymm1 addl -88(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax vpor %ymm8,%ymm1,%ymm1 addl %r12d,%esi xorl %edi,%eax addl -84(%r13),%edx andnl %ebx,%esi,%edi vpaddd %ymm11,%ymm1,%ymm9 addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi vmovdqu %ymm9,288(%rsp) addl %r12d,%edx xorl %edi,%esi addl -64(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -60(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx vpalignr $8,%ymm0,%ymm1,%ymm8 vpxor %ymm6,%ymm2,%ymm2 addl -56(%r13),%ebp andnl %esi,%ebx,%edi vpxor %ymm3,%ymm2,%ymm2 vmovdqu 0(%r14),%ymm11 addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx vpxor %ymm8,%ymm2,%ymm2 andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx vpsrld $30,%ymm2,%ymm8 vpslld $2,%ymm2,%ymm2 addl -52(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp vpor %ymm8,%ymm2,%ymm2 addl %r12d,%eax xorl %edi,%ebp addl -32(%r13),%esi andnl %ecx,%eax,%edi vpaddd %ymm11,%ymm2,%ymm9 addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax vmovdqu %ymm9,320(%rsp) addl %r12d,%esi xorl %edi,%eax addl -28(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -24(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx vpalignr $8,%ymm1,%ymm2,%ymm8 vpxor %ymm7,%ymm3,%ymm3 addl -20(%r13),%ebx andnl %eax,%ecx,%edi vpxor %ymm4,%ymm3,%ymm3 addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpxor %ymm8,%ymm3,%ymm3 andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx vpsrld $30,%ymm3,%ymm8 vpslld $2,%ymm3,%ymm3 addl 0(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx vpor %ymm8,%ymm3,%ymm3 addl %r12d,%ebp xorl %edi,%ebx addl 4(%r13),%eax andnl %edx,%ebp,%edi vpaddd %ymm11,%ymm3,%ymm9 addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp vmovdqu %ymm9,352(%rsp) addl %r12d,%eax xorl %edi,%ebp addl 8(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl 12(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vpalignr $8,%ymm2,%ymm3,%ymm8 vpxor %ymm0,%ymm4,%ymm4 addl 32(%r13),%ecx leal (%rcx,%rsi,1),%ecx vpxor %ymm5,%ymm4,%ymm4 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpxor %ymm8,%ymm4,%ymm4 addl %r12d,%ecx xorl %ebp,%edx addl 36(%r13),%ebx vpsrld $30,%ymm4,%ymm8 vpslld $2,%ymm4,%ymm4 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx 
xorl %eax,%ecx vpor %ymm8,%ymm4,%ymm4 addl 40(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx vpaddd %ymm11,%ymm4,%ymm9 xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 44(%r13),%eax vmovdqu %ymm9,384(%rsp) leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpalignr $8,%ymm3,%ymm4,%ymm8 vpxor %ymm1,%ymm5,%ymm5 addl 68(%r13),%edx leal (%rdx,%rax,1),%edx vpxor %ymm6,%ymm5,%ymm5 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi vpxor %ymm8,%ymm5,%ymm5 addl %r12d,%edx xorl %ebx,%esi addl 72(%r13),%ecx vpsrld $30,%ymm5,%ymm8 vpslld $2,%ymm5,%ymm5 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx vpor %ymm8,%ymm5,%ymm5 addl 76(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpaddd %ymm11,%ymm5,%ymm9 xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 96(%r13),%ebp vmovdqu %ymm9,416(%rsp) leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 100(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpalignr $8,%ymm4,%ymm5,%ymm8 vpxor %ymm2,%ymm6,%ymm6 addl 104(%r13),%esi leal (%rsi,%rbp,1),%esi vpxor %ymm7,%ymm6,%ymm6 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax vpxor %ymm8,%ymm6,%ymm6 addl %r12d,%esi xorl %ecx,%eax addl 108(%r13),%edx leaq 256(%r13),%r13 vpsrld $30,%ymm6,%ymm8 vpslld $2,%ymm6,%ymm6 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vpor %ymm8,%ymm6,%ymm6 addl -128(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpaddd %ymm11,%ymm6,%ymm9 xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -124(%r13),%ebx vmovdqu %ymm9,448(%rsp) leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -120(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpalignr $8,%ymm5,%ymm6,%ymm8 vpxor %ymm3,%ymm7,%ymm7 addl -116(%r13),%eax leal (%rax,%rbx,1),%eax vpxor %ymm0,%ymm7,%ymm7 vmovdqu 32(%r14),%ymm11 rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp vpxor %ymm8,%ymm7,%ymm7 addl %r12d,%eax xorl %edx,%ebp addl -96(%r13),%esi vpsrld $30,%ymm7,%ymm8 vpslld $2,%ymm7,%ymm7 leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpor %ymm8,%ymm7,%ymm7 addl -92(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpaddd %ymm11,%ymm7,%ymm9 xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -88(%r13),%ecx vmovdqu %ymm9,480(%rsp) leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -84(%r13),%ebx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx jmp .Lalign32_2 .align 32 .Lalign32_2: vpalignr $8,%ymm6,%ymm7,%ymm8 vpxor %ymm4,%ymm0,%ymm0 addl -64(%r13),%ebp xorl %esi,%ecx vpxor %ymm1,%ymm0,%ymm0 movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp vpxor %ymm8,%ymm0,%ymm0 rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx vpsrld $30,%ymm0,%ymm8 vpslld $2,%ymm0,%ymm0 addl %r12d,%ebp andl %edi,%ebx addl -60(%r13),%eax xorl %edx,%ebx 
movl %ecx,%edi xorl %edx,%edi vpor %ymm8,%ymm0,%ymm0 leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp vpaddd %ymm11,%ymm0,%ymm9 addl %r12d,%eax andl %edi,%ebp addl -56(%r13),%esi xorl %ecx,%ebp vmovdqu %ymm9,512(%rsp) movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl -52(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi addl -32(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx vpalignr $8,%ymm7,%ymm0,%ymm8 vpxor %ymm5,%ymm1,%ymm1 addl -28(%r13),%ebx xorl %eax,%edx vpxor %ymm2,%ymm1,%ymm1 movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx vpxor %ymm8,%ymm1,%ymm1 rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vpsrld $30,%ymm1,%ymm8 vpslld $2,%ymm1,%ymm1 addl %r12d,%ebx andl %edi,%ecx addl -24(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi vpor %ymm8,%ymm1,%ymm1 leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx vpaddd %ymm11,%ymm1,%ymm9 addl %r12d,%ebp andl %edi,%ebx addl -20(%r13),%eax xorl %edx,%ebx vmovdqu %ymm9,544(%rsp) movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 0(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl 4(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi vpalignr $8,%ymm0,%ymm1,%ymm8 vpxor %ymm6,%ymm2,%ymm2 addl 8(%r13),%ecx xorl %ebp,%esi vpxor %ymm3,%ymm2,%ymm2 movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx vpxor %ymm8,%ymm2,%ymm2 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpsrld $30,%ymm2,%ymm8 vpslld $2,%ymm2,%ymm2 addl %r12d,%ecx andl %edi,%edx addl 12(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi vpor %ymm8,%ymm2,%ymm2 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vpaddd %ymm11,%ymm2,%ymm9 addl %r12d,%ebx andl %edi,%ecx addl 32(%r13),%ebp xorl %esi,%ecx vmovdqu %ymm9,576(%rsp) movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 36(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 40(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax vpalignr $8,%ymm1,%ymm2,%ymm8 vpxor %ymm7,%ymm3,%ymm3 addl 44(%r13),%edx xorl %ebx,%eax vpxor %ymm4,%ymm3,%ymm3 movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx vpxor %ymm8,%ymm3,%ymm3 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi vpsrld $30,%ymm3,%ymm8 vpslld $2,%ymm3,%ymm3 addl %r12d,%edx andl %edi,%esi addl 64(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi vpor %ymm8,%ymm3,%ymm3 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx vpaddd %ymm11,%ymm3,%ymm9 addl %r12d,%ecx andl %edi,%edx addl 68(%r13),%ebx xorl %eax,%edx vmovdqu %ymm9,608(%rsp) movl %esi,%edi xorl %eax,%edi leal 
(%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx addl 72(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 76(%r13),%eax xorl %edx,%ebx leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl 100(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 104(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 108(%r13),%ebx leaq 256(%r13),%r13 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -128(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -124(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -120(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -116(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -96(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -92(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -88(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -84(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -60(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -56(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -52(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -32(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -28(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -24(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -20(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d addl %r12d,%edx leaq 128(%r9),%r13 leaq 128(%r9),%rdi cmpq %r10,%r13 cmovaeq %r9,%r13 addl 0(%r8),%edx addl 4(%r8),%esi addl 8(%r8),%ebp movl %edx,0(%r8) addl 12(%r8),%ebx movl %esi,4(%r8) movl %edx,%eax addl 16(%r8),%ecx movl %ebp,%r12d movl %ebp,8(%r8) movl %ebx,%edx movl %ebx,12(%r8) movl %esi,%ebp movl %ecx,16(%r8) movl %ecx,%esi movl %r12d,%ecx cmpq %r10,%r9 je .Ldone_avx2 vmovdqu 64(%r14),%ymm6 cmpq %r10,%rdi ja .Last_avx2 vmovdqu -64(%rdi),%xmm0 vmovdqu -48(%rdi),%xmm1 vmovdqu -32(%rdi),%xmm2 vmovdqu -16(%rdi),%xmm3 vinserti128 $1,0(%r13),%ymm0,%ymm0 
vinserti128 $1,16(%r13),%ymm1,%ymm1 vinserti128 $1,32(%r13),%ymm2,%ymm2 vinserti128 $1,48(%r13),%ymm3,%ymm3 jmp .Last_avx2 .align 32 .Last_avx2: leaq 128+16(%rsp),%r13 rorxl $2,%ebp,%ebx andnl %edx,%ebp,%edi andl %ecx,%ebp xorl %edi,%ebp subq $-128,%r9 addl -128(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -124(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -120(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -116(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -96(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl -92(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl -88(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -84(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -64(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -60(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl -56(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl -52(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl -32(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl -28(%r13),%edx andnl %ebx,%esi,%edi addl %eax,%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax andl %ebp,%esi addl %r12d,%edx xorl %edi,%esi addl -24(%r13),%ecx andnl %ebp,%edx,%edi addl %esi,%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi andl %eax,%edx addl %r12d,%ecx xorl %edi,%edx addl -20(%r13),%ebx andnl %eax,%ecx,%edi addl %edx,%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx andl %esi,%ecx addl %r12d,%ebx xorl %edi,%ecx addl 0(%r13),%ebp andnl %esi,%ebx,%edi addl %ecx,%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx andl %edx,%ebx addl %r12d,%ebp xorl %edi,%ebx addl 4(%r13),%eax andnl %edx,%ebp,%edi addl %ebx,%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx andl %ecx,%ebp addl %r12d,%eax xorl %edi,%ebp addl 8(%r13),%esi andnl %ecx,%eax,%edi addl %ebp,%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp andl %ebx,%eax addl %r12d,%esi xorl %edi,%eax addl 12(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 32(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 36(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 40(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 44(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl 
%ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl 64(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vmovdqu -64(%r14),%ymm11 vpshufb %ymm6,%ymm0,%ymm0 addl 68(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl 72(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl 76(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl 96(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl 100(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpshufb %ymm6,%ymm1,%ymm1 vpaddd %ymm11,%ymm0,%ymm8 addl 104(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl 108(%r13),%edx leaq 256(%r13),%r13 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -128(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -124(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -120(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vmovdqu %ymm8,0(%rsp) vpshufb %ymm6,%ymm2,%ymm2 vpaddd %ymm11,%ymm1,%ymm9 addl -116(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -92(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi addl -88(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -84(%r13),%ebx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx vmovdqu %ymm9,32(%rsp) vpshufb %ymm6,%ymm3,%ymm3 vpaddd %ymm11,%ymm2,%ymm6 addl -64(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl -60(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl -56(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl -52(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi addl -32(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx jmp .Lalign32_3 .align 32 .Lalign32_3: vmovdqu %ymm6,64(%rsp) vpaddd %ymm11,%ymm3,%ymm7 addl -28(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl 
%edi,%ecx addl -24(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl -20(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 0(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax addl 4(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx andl %edi,%esi vmovdqu %ymm7,96(%rsp) addl 8(%r13),%ecx xorl %ebp,%esi movl %eax,%edi xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx andl %edi,%edx addl 12(%r13),%ebx xorl %eax,%edx movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx andl %edi,%ecx addl 32(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 36(%r13),%eax xorl %edx,%ebx movl %ecx,%edi xorl %edx,%edi leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax andl %edi,%ebp addl 40(%r13),%esi xorl %ecx,%ebp movl %ebx,%edi xorl %ecx,%edi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi andl %edi,%eax vpalignr $8,%ymm0,%ymm1,%ymm4 addl 44(%r13),%edx xorl %ebx,%eax movl %ebp,%edi xorl %ebx,%edi vpsrldq $4,%ymm3,%ymm8 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpxor %ymm0,%ymm4,%ymm4 vpxor %ymm2,%ymm8,%ymm8 xorl %ebp,%esi addl %r12d,%edx vpxor %ymm8,%ymm4,%ymm4 andl %edi,%esi addl 64(%r13),%ecx xorl %ebp,%esi movl %eax,%edi vpsrld $31,%ymm4,%ymm8 xorl %ebp,%edi leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d vpslldq $12,%ymm4,%ymm10 vpaddd %ymm4,%ymm4,%ymm4 rorxl $2,%edx,%esi xorl %eax,%edx vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm4,%ymm4 addl %r12d,%ecx andl %edi,%edx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm4,%ymm4 addl 68(%r13),%ebx xorl %eax,%edx vpxor %ymm10,%ymm4,%ymm4 movl %esi,%edi xorl %eax,%edi leal (%rbx,%rdx,1),%ebx vpaddd %ymm11,%ymm4,%ymm9 rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx vmovdqu %ymm9,128(%rsp) addl %r12d,%ebx andl %edi,%ecx addl 72(%r13),%ebp xorl %esi,%ecx movl %edx,%edi xorl %esi,%edi leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp andl %edi,%ebx addl 76(%r13),%eax xorl %edx,%ebx leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpalignr $8,%ymm1,%ymm2,%ymm5 addl 96(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpsrldq $4,%ymm4,%ymm8 xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax vpxor %ymm1,%ymm5,%ymm5 vpxor %ymm3,%ymm8,%ymm8 addl 100(%r13),%edx leal (%rdx,%rax,1),%edx vpxor %ymm8,%ymm5,%ymm5 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax xorl %ebp,%esi addl %r12d,%edx vpsrld $31,%ymm5,%ymm8 vmovdqu -32(%r14),%ymm11 xorl %ebx,%esi addl 104(%r13),%ecx leal (%rcx,%rsi,1),%ecx vpslldq $12,%ymm5,%ymm10 vpaddd %ymm5,%ymm5,%ymm5 rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm5,%ymm5 xorl %eax,%edx addl %r12d,%ecx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm5,%ymm5 xorl %ebp,%edx addl 108(%r13),%ebx leaq 256(%r13),%r13 
vpxor %ymm10,%ymm5,%ymm5 leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx vpaddd %ymm11,%ymm5,%ymm9 xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx vmovdqu %ymm9,160(%rsp) addl -128(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpalignr $8,%ymm2,%ymm3,%ymm6 addl -124(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx vpsrldq $4,%ymm5,%ymm8 xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp vpxor %ymm2,%ymm6,%ymm6 vpxor %ymm4,%ymm8,%ymm8 addl -120(%r13),%esi leal (%rsi,%rbp,1),%esi vpxor %ymm8,%ymm6,%ymm6 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi vpsrld $31,%ymm6,%ymm8 xorl %ecx,%eax addl -116(%r13),%edx leal (%rdx,%rax,1),%edx vpslldq $12,%ymm6,%ymm10 vpaddd %ymm6,%ymm6,%ymm6 rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm6,%ymm6 xorl %ebp,%esi addl %r12d,%edx vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm6,%ymm6 xorl %ebx,%esi addl -96(%r13),%ecx vpxor %ymm10,%ymm6,%ymm6 leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi vpaddd %ymm11,%ymm6,%ymm9 xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx vmovdqu %ymm9,192(%rsp) addl -92(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx vpalignr $8,%ymm3,%ymm4,%ymm7 addl -88(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx vpsrldq $4,%ymm6,%ymm8 xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx vpxor %ymm3,%ymm7,%ymm7 vpxor %ymm5,%ymm8,%ymm8 addl -84(%r13),%eax leal (%rax,%rbx,1),%eax vpxor %ymm8,%ymm7,%ymm7 rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax vpsrld $31,%ymm7,%ymm8 xorl %edx,%ebp addl -64(%r13),%esi leal (%rsi,%rbp,1),%esi vpslldq $12,%ymm7,%ymm10 vpaddd %ymm7,%ymm7,%ymm7 rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp vpsrld $30,%ymm10,%ymm9 vpor %ymm8,%ymm7,%ymm7 xorl %ebx,%eax addl %r12d,%esi vpslld $2,%ymm10,%ymm10 vpxor %ymm9,%ymm7,%ymm7 xorl %ecx,%eax addl -60(%r13),%edx vpxor %ymm10,%ymm7,%ymm7 leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d rorxl $2,%esi,%eax vpaddd %ymm11,%ymm7,%ymm9 xorl %ebp,%esi addl %r12d,%edx xorl %ebx,%esi vmovdqu %ymm9,224(%rsp) addl -56(%r13),%ecx leal (%rcx,%rsi,1),%ecx rorxl $27,%edx,%r12d rorxl $2,%edx,%esi xorl %eax,%edx addl %r12d,%ecx xorl %ebp,%edx addl -52(%r13),%ebx leal (%rbx,%rdx,1),%ebx rorxl $27,%ecx,%r12d rorxl $2,%ecx,%edx xorl %esi,%ecx addl %r12d,%ebx xorl %eax,%ecx addl -32(%r13),%ebp leal (%rcx,%rbp,1),%ebp rorxl $27,%ebx,%r12d rorxl $2,%ebx,%ecx xorl %edx,%ebx addl %r12d,%ebp xorl %esi,%ebx addl -28(%r13),%eax leal (%rax,%rbx,1),%eax rorxl $27,%ebp,%r12d rorxl $2,%ebp,%ebx xorl %ecx,%ebp addl %r12d,%eax xorl %edx,%ebp addl -24(%r13),%esi leal (%rsi,%rbp,1),%esi rorxl $27,%eax,%r12d rorxl $2,%eax,%ebp xorl %ebx,%eax addl %r12d,%esi xorl %ecx,%eax addl -20(%r13),%edx leal (%rdx,%rax,1),%edx rorxl $27,%esi,%r12d addl %r12d,%edx leaq 128(%rsp),%r13 addl 0(%r8),%edx addl 4(%r8),%esi addl 8(%r8),%ebp movl %edx,0(%r8) addl 12(%r8),%ebx movl %esi,4(%r8) movl %edx,%eax addl 16(%r8),%ecx movl %ebp,%r12d movl %ebp,8(%r8) movl %ebx,%edx movl %ebx,12(%r8) movl %esi,%ebp movl %ecx,16(%r8) movl %ecx,%esi movl %r12d,%ecx cmpq %r10,%r9 jbe .Loop_avx2 .Ldone_avx2: vzeroupper movq -40(%r11),%r14 .cfi_restore %r14 movq -32(%r11),%r13 .cfi_restore %r13 movq -24(%r11),%r12 .cfi_restore %r12 movq -16(%r11),%rbp .cfi_restore %rbp movq -8(%r11),%rbx .cfi_restore %rbx leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lepilogue_avx2: .byte 
0xf3,0xc3 .cfi_endproc .size sha1_block_data_order_avx2,.-sha1_block_data_order_avx2 .section .rodata .align 64 K_XX_XX: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .text #endif
marvin-hansen/iggy-streaming-system
78,455
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/p256-x86_64-asm.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P .section .rodata .align 64 .Lpoly: .quad 0xffffffffffffffff, 0x00000000ffffffff, 0x0000000000000000, 0xffffffff00000001 .LOne: .long 1,1,1,1,1,1,1,1 .LTwo: .long 2,2,2,2,2,2,2,2 .LThree: .long 3,3,3,3,3,3,3,3 .LONE_mont: .quad 0x0000000000000001, 0xffffffff00000000, 0xffffffffffffffff, 0x00000000fffffffe .Lord: .quad 0xf3b9cac2fc632551, 0xbce6faada7179e84, 0xffffffffffffffff, 0xffffffff00000000 .LordK: .quad 0xccd1c8aaee00bc4f .text .globl ecp_nistz256_neg .hidden ecp_nistz256_neg .type ecp_nistz256_neg,@function .align 32 ecp_nistz256_neg: .cfi_startproc _CET_ENDBR pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-16 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-24 .Lneg_body: xorq %r8,%r8 xorq %r9,%r9 xorq %r10,%r10 xorq %r11,%r11 xorq %r13,%r13 subq 0(%rsi),%r8 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r8,%rax sbbq 24(%rsi),%r11 leaq .Lpoly(%rip),%rsi movq %r9,%rdx sbbq $0,%r13 addq 0(%rsi),%r8 movq %r10,%rcx adcq 8(%rsi),%r9 adcq 16(%rsi),%r10 movq %r11,%r12 adcq 24(%rsi),%r11 testq %r13,%r13 cmovzq %rax,%r8 cmovzq %rdx,%r9 movq %r8,0(%rdi) cmovzq %rcx,%r10 movq %r9,8(%rdi) cmovzq %r12,%r11 movq %r10,16(%rdi) movq %r11,24(%rdi) movq 0(%rsp),%r13 .cfi_restore %r13 movq 8(%rsp),%r12 .cfi_restore %r12 leaq 16(%rsp),%rsp .cfi_adjust_cfa_offset -16 .Lneg_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_neg,.-ecp_nistz256_neg .globl ecp_nistz256_ord_mul_mont .hidden ecp_nistz256_ord_mul_mont .type ecp_nistz256_ord_mul_mont,@function .align 32 ecp_nistz256_ord_mul_mont: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je .Lecp_nistz256_ord_mul_montx #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lord_mul_body: movq 0(%rdx),%rax movq %rdx,%rbx leaq .Lord(%rip),%r14 movq .LordK(%rip),%r15 movq %rax,%rcx mulq 0(%rsi) movq %rax,%r8 movq %rcx,%rax movq %rdx,%r9 mulq 8(%rsi) addq %rax,%r9 movq %rcx,%rax adcq $0,%rdx movq %rdx,%r10 mulq 16(%rsi) addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %r8,%r13 imulq %r15,%r8 movq %rdx,%r11 mulq 24(%rsi) addq %rax,%r11 movq %r8,%rax adcq $0,%rdx movq %rdx,%r12 mulq 0(%r14) movq %r8,%rbp addq %rax,%r13 movq %r8,%rax adcq $0,%rdx movq %rdx,%rcx subq %r8,%r10 sbbq $0,%r8 mulq 8(%r14) addq %rcx,%r9 adcq $0,%rdx addq %rax,%r9 movq %rbp,%rax adcq %rdx,%r10 movq %rbp,%rdx adcq $0,%r8 shlq $32,%rax shrq $32,%rdx subq %rax,%r11 movq 8(%rbx),%rax sbbq %rdx,%rbp addq %r8,%r11 adcq %rbp,%r12 adcq $0,%r13 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r9 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r10 adcq $0,%rdx addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r11 adcq $0,%rdx addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %r9,%rcx imulq %r15,%r9 movq %rdx,%rbp mulq 24(%rsi) addq %rbp,%r12 adcq $0,%rdx xorq %r8,%r8 addq %rax,%r12 movq %r9,%rax adcq %rdx,%r13 adcq $0,%r8 mulq 0(%r14) movq %r9,%rbp addq %rax,%rcx movq 
%r9,%rax adcq %rdx,%rcx subq %r9,%r11 sbbq $0,%r9 mulq 8(%r14) addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %rbp,%rax adcq %rdx,%r11 movq %rbp,%rdx adcq $0,%r9 shlq $32,%rax shrq $32,%rdx subq %rax,%r12 movq 16(%rbx),%rax sbbq %rdx,%rbp addq %r9,%r12 adcq %rbp,%r13 adcq $0,%r8 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r10 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r11 adcq $0,%rdx addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r12 adcq $0,%rdx addq %rax,%r12 movq %rcx,%rax adcq $0,%rdx movq %r10,%rcx imulq %r15,%r10 movq %rdx,%rbp mulq 24(%rsi) addq %rbp,%r13 adcq $0,%rdx xorq %r9,%r9 addq %rax,%r13 movq %r10,%rax adcq %rdx,%r8 adcq $0,%r9 mulq 0(%r14) movq %r10,%rbp addq %rax,%rcx movq %r10,%rax adcq %rdx,%rcx subq %r10,%r12 sbbq $0,%r10 mulq 8(%r14) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq %rdx,%r12 movq %rbp,%rdx adcq $0,%r10 shlq $32,%rax shrq $32,%rdx subq %rax,%r13 movq 24(%rbx),%rax sbbq %rdx,%rbp addq %r10,%r13 adcq %rbp,%r8 adcq $0,%r9 movq %rax,%rcx mulq 0(%rsi) addq %rax,%r11 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 8(%rsi) addq %rbp,%r12 adcq $0,%rdx addq %rax,%r12 movq %rcx,%rax adcq $0,%rdx movq %rdx,%rbp mulq 16(%rsi) addq %rbp,%r13 adcq $0,%rdx addq %rax,%r13 movq %rcx,%rax adcq $0,%rdx movq %r11,%rcx imulq %r15,%r11 movq %rdx,%rbp mulq 24(%rsi) addq %rbp,%r8 adcq $0,%rdx xorq %r10,%r10 addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 adcq $0,%r10 mulq 0(%r14) movq %r11,%rbp addq %rax,%rcx movq %r11,%rax adcq %rdx,%rcx subq %r11,%r13 sbbq $0,%r11 mulq 8(%r14) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq %rdx,%r13 movq %rbp,%rdx adcq $0,%r11 shlq $32,%rax shrq $32,%rdx subq %rax,%r8 sbbq %rdx,%rbp addq %r11,%r8 adcq %rbp,%r9 adcq $0,%r10 movq %r12,%rsi subq 0(%r14),%r12 movq %r13,%r11 sbbq 8(%r14),%r13 movq %r8,%rcx sbbq 16(%r14),%r8 movq %r9,%rbp sbbq 24(%r14),%r9 sbbq $0,%r10 cmovcq %rsi,%r12 cmovcq %r11,%r13 cmovcq %rcx,%r8 cmovcq %rbp,%r9 movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lord_mul_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_ord_mul_mont,.-ecp_nistz256_ord_mul_mont .globl ecp_nistz256_ord_sqr_mont .hidden ecp_nistz256_ord_sqr_mont .type ecp_nistz256_ord_sqr_mont,@function .align 32 ecp_nistz256_ord_sqr_mont: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je .Lecp_nistz256_ord_sqr_montx #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lord_sqr_body: movq 0(%rsi),%r8 movq 8(%rsi),%rax movq 16(%rsi),%r14 movq 24(%rsi),%r15 leaq .Lord(%rip),%rsi movq %rdx,%rbx jmp .Loop_ord_sqr .align 32 .Loop_ord_sqr: movq %rax,%rbp mulq %r8 movq %rax,%r9 .byte 102,72,15,110,205 movq %r14,%rax movq %rdx,%r10 mulq %r8 addq %rax,%r10 movq %r15,%rax .byte 102,73,15,110,214 adcq $0,%rdx movq %rdx,%r11 mulq %r8 addq %rax,%r11 movq %r15,%rax .byte 102,73,15,110,223 adcq $0,%rdx 
movq %rdx,%r12 mulq %r14 movq %rax,%r13 movq %r14,%rax movq %rdx,%r14 mulq %rbp addq %rax,%r11 movq %r15,%rax adcq $0,%rdx movq %rdx,%r15 mulq %rbp addq %rax,%r12 adcq $0,%rdx addq %r15,%r12 adcq %rdx,%r13 adcq $0,%r14 xorq %r15,%r15 movq %r8,%rax addq %r9,%r9 adcq %r10,%r10 adcq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 adcq %r14,%r14 adcq $0,%r15 mulq %rax movq %rax,%r8 .byte 102,72,15,126,200 movq %rdx,%rbp mulq %rax addq %rbp,%r9 adcq %rax,%r10 .byte 102,72,15,126,208 adcq $0,%rdx movq %rdx,%rbp mulq %rax addq %rbp,%r11 adcq %rax,%r12 .byte 102,72,15,126,216 adcq $0,%rdx movq %rdx,%rbp movq %r8,%rcx imulq 32(%rsi),%r8 mulq %rax addq %rbp,%r13 adcq %rax,%r14 movq 0(%rsi),%rax adcq %rdx,%r15 mulq %r8 movq %r8,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r8,%r10 sbbq $0,%rbp mulq %r8 addq %rcx,%r9 adcq $0,%rdx addq %rax,%r9 movq %r8,%rax adcq %rdx,%r10 movq %r8,%rdx adcq $0,%rbp movq %r9,%rcx imulq 32(%rsi),%r9 shlq $32,%rax shrq $32,%rdx subq %rax,%r11 movq 0(%rsi),%rax sbbq %rdx,%r8 addq %rbp,%r11 adcq $0,%r8 mulq %r9 movq %r9,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r9,%r11 sbbq $0,%rbp mulq %r9 addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %r9,%rax adcq %rdx,%r11 movq %r9,%rdx adcq $0,%rbp movq %r10,%rcx imulq 32(%rsi),%r10 shlq $32,%rax shrq $32,%rdx subq %rax,%r8 movq 0(%rsi),%rax sbbq %rdx,%r9 addq %rbp,%r8 adcq $0,%r9 mulq %r10 movq %r10,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r10,%r8 sbbq $0,%rbp mulq %r10 addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %r10,%rax adcq %rdx,%r8 movq %r10,%rdx adcq $0,%rbp movq %r11,%rcx imulq 32(%rsi),%r11 shlq $32,%rax shrq $32,%rdx subq %rax,%r9 movq 0(%rsi),%rax sbbq %rdx,%r10 addq %rbp,%r9 adcq $0,%r10 mulq %r11 movq %r11,%rbp addq %rax,%rcx movq 8(%rsi),%rax adcq %rdx,%rcx subq %r11,%r9 sbbq $0,%rbp mulq %r11 addq %rcx,%r8 adcq $0,%rdx addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 movq %r11,%rdx adcq $0,%rbp shlq $32,%rax shrq $32,%rdx subq %rax,%r10 sbbq %rdx,%r11 addq %rbp,%r10 adcq $0,%r11 xorq %rdx,%rdx addq %r12,%r8 adcq %r13,%r9 movq %r8,%r12 adcq %r14,%r10 adcq %r15,%r11 movq %r9,%rax adcq $0,%rdx subq 0(%rsi),%r8 movq %r10,%r14 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r11,%r15 sbbq 24(%rsi),%r11 sbbq $0,%rdx cmovcq %r12,%r8 cmovncq %r9,%rax cmovncq %r10,%r14 cmovncq %r11,%r15 decq %rbx jnz .Loop_ord_sqr movq %r8,0(%rdi) movq %rax,8(%rdi) pxor %xmm1,%xmm1 movq %r14,16(%rdi) pxor %xmm2,%xmm2 movq %r15,24(%rdi) pxor %xmm3,%xmm3 movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lord_sqr_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_ord_sqr_mont,.-ecp_nistz256_ord_sqr_mont #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .type ecp_nistz256_ord_mul_montx,@function .align 32 ecp_nistz256_ord_mul_montx: .cfi_startproc .Lecp_nistz256_ord_mul_montx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lord_mulx_body: movq %rdx,%rbx movq 0(%rdx),%rdx movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 leaq -128(%rsi),%rsi leaq .Lord-128(%rip),%r14 movq .LordK(%rip),%r15 
mulxq %r9,%r8,%r9 mulxq %r10,%rcx,%r10 mulxq %r11,%rbp,%r11 addq %rcx,%r9 mulxq %r12,%rcx,%r12 movq %r8,%rdx mulxq %r15,%rdx,%rax adcq %rbp,%r10 adcq %rcx,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 24+128(%r14),%rcx,%rbp movq 8(%rbx),%rdx adcxq %rcx,%r11 adoxq %rbp,%r12 adcxq %r8,%r12 adoxq %r8,%r13 adcq $0,%r13 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%rsi),%rcx,%rbp movq %r9,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r12 adoxq %rbp,%r13 adcxq %r8,%r13 adoxq %r8,%r8 adcq $0,%r8 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%r14),%rcx,%rbp movq 16(%rbx),%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcxq %r9,%r13 adoxq %r9,%r8 adcq $0,%r8 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%rsi),%rcx,%rbp movq %r10,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r13 adoxq %rbp,%r8 adcxq %r9,%r8 adoxq %r9,%r9 adcq $0,%r9 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%r14),%rcx,%rbp movq 24(%rbx),%rdx adcxq %rcx,%r13 adoxq %rbp,%r8 adcxq %r10,%r8 adoxq %r10,%r9 adcq $0,%r9 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%rsi),%rcx,%rbp movq %r11,%rdx mulxq %r15,%rdx,%rax adcxq %rcx,%r8 adoxq %rbp,%r9 adcxq %r10,%r9 adoxq %r10,%r10 adcq $0,%r10 mulxq 0+128(%r14),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%r14),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%r14),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%r14),%rcx,%rbp leaq 128(%r14),%r14 movq %r12,%rbx adcxq %rcx,%r8 adoxq %rbp,%r9 movq %r13,%rdx adcxq %r11,%r9 adoxq %r11,%r10 adcq $0,%r10 movq %r8,%rcx subq 0(%r14),%r12 sbbq 8(%r14),%r13 sbbq 16(%r14),%r8 movq %r9,%rbp sbbq 24(%r14),%r9 sbbq $0,%r10 cmovcq %rbx,%r12 cmovcq %rdx,%r13 cmovcq %rcx,%r8 cmovcq %rbp,%r9 movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lord_mulx_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_ord_mul_montx,.-ecp_nistz256_ord_mul_montx .type ecp_nistz256_ord_sqr_montx,@function .align 32 ecp_nistz256_ord_sqr_montx: .cfi_startproc .Lecp_nistz256_ord_sqr_montx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lord_sqrx_body: movq %rdx,%rbx movq 0(%rsi),%rdx movq 8(%rsi),%r14 movq 
16(%rsi),%r15 movq 24(%rsi),%r8 leaq .Lord(%rip),%rsi jmp .Loop_ord_sqrx .align 32 .Loop_ord_sqrx: mulxq %r14,%r9,%r10 mulxq %r15,%rcx,%r11 movq %rdx,%rax .byte 102,73,15,110,206 mulxq %r8,%rbp,%r12 movq %r14,%rdx addq %rcx,%r10 .byte 102,73,15,110,215 adcq %rbp,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq %r15,%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq %r8,%rcx,%rbp movq %r15,%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcq $0,%r13 mulxq %r8,%rcx,%r14 movq %rax,%rdx .byte 102,73,15,110,216 xorq %r15,%r15 adcxq %r9,%r9 adoxq %rcx,%r13 adcxq %r10,%r10 adoxq %r15,%r14 mulxq %rdx,%r8,%rbp .byte 102,72,15,126,202 adcxq %r11,%r11 adoxq %rbp,%r9 adcxq %r12,%r12 mulxq %rdx,%rcx,%rax .byte 102,72,15,126,210 adcxq %r13,%r13 adoxq %rcx,%r10 adcxq %r14,%r14 mulxq %rdx,%rcx,%rbp .byte 0x67 .byte 102,72,15,126,218 adoxq %rax,%r11 adcxq %r15,%r15 adoxq %rcx,%r12 adoxq %rbp,%r13 mulxq %rdx,%rcx,%rax adoxq %rcx,%r14 adoxq %rax,%r15 movq %r8,%rdx mulxq 32(%rsi),%rdx,%rcx xorq %rax,%rax mulxq 0(%rsi),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 8(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 16(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 24(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r8 adcxq %rax,%r8 movq %r9,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adoxq %rcx,%r9 adcxq %rbp,%r10 mulxq 8(%rsi),%rcx,%rbp adoxq %rcx,%r10 adcxq %rbp,%r11 mulxq 16(%rsi),%rcx,%rbp adoxq %rcx,%r11 adcxq %rbp,%r8 mulxq 24(%rsi),%rcx,%rbp adoxq %rcx,%r8 adcxq %rbp,%r9 adoxq %rax,%r9 movq %r10,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r8 mulxq 16(%rsi),%rcx,%rbp adcxq %rcx,%r8 adoxq %rbp,%r9 mulxq 24(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 adcxq %rax,%r10 movq %r11,%rdx mulxq 32(%rsi),%rdx,%rcx mulxq 0(%rsi),%rcx,%rbp adoxq %rcx,%r11 adcxq %rbp,%r8 mulxq 8(%rsi),%rcx,%rbp adoxq %rcx,%r8 adcxq %rbp,%r9 mulxq 16(%rsi),%rcx,%rbp adoxq %rcx,%r9 adcxq %rbp,%r10 mulxq 24(%rsi),%rcx,%rbp adoxq %rcx,%r10 adcxq %rbp,%r11 adoxq %rax,%r11 addq %r8,%r12 adcq %r13,%r9 movq %r12,%rdx adcq %r14,%r10 adcq %r15,%r11 movq %r9,%r14 adcq $0,%rax subq 0(%rsi),%r12 movq %r10,%r15 sbbq 8(%rsi),%r9 sbbq 16(%rsi),%r10 movq %r11,%r8 sbbq 24(%rsi),%r11 sbbq $0,%rax cmovncq %r12,%rdx cmovncq %r9,%r14 cmovncq %r10,%r15 cmovncq %r11,%r8 decq %rbx jnz .Loop_ord_sqrx movq %rdx,0(%rdi) movq %r14,8(%rdi) pxor %xmm1,%xmm1 movq %r15,16(%rdi) pxor %xmm2,%xmm2 movq %r8,24(%rdi) pxor %xmm3,%xmm3 movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lord_sqrx_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_ord_sqr_montx,.-ecp_nistz256_ord_sqr_montx #endif .globl ecp_nistz256_mul_mont .hidden ecp_nistz256_mul_mont .type ecp_nistz256_mul_mont,@function .align 32 ecp_nistz256_mul_mont: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx #endif .Lmul_mont: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lmul_body: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX 
cmpl $0x80100,%ecx je .Lmul_montx #endif movq %rdx,%rbx movq 0(%rdx),%rax movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 call __ecp_nistz256_mul_montq #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX jmp .Lmul_mont_done .align 32 .Lmul_montx: movq %rdx,%rbx movq 0(%rdx),%rdx movq 0(%rsi),%r9 movq 8(%rsi),%r10 movq 16(%rsi),%r11 movq 24(%rsi),%r12 leaq -128(%rsi),%rsi call __ecp_nistz256_mul_montx #endif .Lmul_mont_done: movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lmul_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_mul_mont,.-ecp_nistz256_mul_mont .type __ecp_nistz256_mul_montq,@function .align 32 __ecp_nistz256_mul_montq: .cfi_startproc movq %rax,%rbp mulq %r9 movq .Lpoly+8(%rip),%r14 movq %rax,%r8 movq %rbp,%rax movq %rdx,%r9 mulq %r10 movq .Lpoly+24(%rip),%r15 addq %rax,%r9 movq %rbp,%rax adcq $0,%rdx movq %rdx,%r10 mulq %r11 addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%r11 mulq %r12 addq %rax,%r11 movq %r8,%rax adcq $0,%rdx xorq %r13,%r13 movq %rdx,%r12 movq %r8,%rbp shlq $32,%r8 mulq %r15 shrq $32,%rbp addq %r8,%r9 adcq %rbp,%r10 adcq %rax,%r11 movq 8(%rbx),%rax adcq %rdx,%r12 adcq $0,%r13 xorq %r8,%r8 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r9 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r10 adcq $0,%rdx addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %r9,%rax adcq %rdx,%r13 adcq $0,%r8 movq %r9,%rbp shlq $32,%r9 mulq %r15 shrq $32,%rbp addq %r9,%r10 adcq %rbp,%r11 adcq %rax,%r12 movq 16(%rbx),%rax adcq %rdx,%r13 adcq $0,%r8 xorq %r9,%r9 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r10 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r11 adcq $0,%rdx addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r13 adcq $0,%rdx addq %rax,%r13 movq %r10,%rax adcq %rdx,%r8 adcq $0,%r9 movq %r10,%rbp shlq $32,%r10 mulq %r15 shrq $32,%rbp addq %r10,%r11 adcq %rbp,%r12 adcq %rax,%r13 movq 24(%rbx),%rax adcq %rdx,%r8 adcq $0,%r9 xorq %r10,%r10 movq %rax,%rbp mulq 0(%rsi) addq %rax,%r11 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 8(%rsi) addq %rcx,%r12 adcq $0,%rdx addq %rax,%r12 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 16(%rsi) addq %rcx,%r13 adcq $0,%rdx addq %rax,%r13 movq %rbp,%rax adcq $0,%rdx movq %rdx,%rcx mulq 24(%rsi) addq %rcx,%r8 adcq $0,%rdx addq %rax,%r8 movq %r11,%rax adcq %rdx,%r9 adcq $0,%r10 movq %r11,%rbp shlq $32,%r11 mulq %r15 shrq $32,%rbp addq %r11,%r12 adcq %rbp,%r13 movq %r12,%rcx adcq %rax,%r8 adcq %rdx,%r9 movq %r13,%rbp adcq $0,%r10 subq $-1,%r12 movq %r8,%rbx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%rdx sbbq %r15,%r9 sbbq $0,%r10 cmovcq %rcx,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rbx,%r8 movq %r13,8(%rdi) cmovcq %rdx,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_mul_montq,.-__ecp_nistz256_mul_montq .globl ecp_nistz256_sqr_mont .hidden ecp_nistz256_sqr_mont .type ecp_nistz256_sqr_mont,@function .align 32 ecp_nistz256_sqr_mont: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx 
movq 8(%rcx),%rcx andl $0x80100,%ecx #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .Lsqr_body: #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX cmpl $0x80100,%ecx je .Lsqr_montx #endif movq 0(%rsi),%rax movq 8(%rsi),%r14 movq 16(%rsi),%r15 movq 24(%rsi),%r8 call __ecp_nistz256_sqr_montq #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX jmp .Lsqr_mont_done .align 32 .Lsqr_montx: movq 0(%rsi),%rdx movq 8(%rsi),%r14 movq 16(%rsi),%r15 movq 24(%rsi),%r8 leaq -128(%rsi),%rsi call __ecp_nistz256_sqr_montx #endif .Lsqr_mont_done: movq 0(%rsp),%r15 .cfi_restore %r15 movq 8(%rsp),%r14 .cfi_restore %r14 movq 16(%rsp),%r13 .cfi_restore %r13 movq 24(%rsp),%r12 .cfi_restore %r12 movq 32(%rsp),%rbx .cfi_restore %rbx movq 40(%rsp),%rbp .cfi_restore %rbp leaq 48(%rsp),%rsp .cfi_adjust_cfa_offset -48 .Lsqr_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_sqr_mont,.-ecp_nistz256_sqr_mont .type __ecp_nistz256_sqr_montq,@function .align 32 __ecp_nistz256_sqr_montq: .cfi_startproc movq %rax,%r13 mulq %r14 movq %rax,%r9 movq %r15,%rax movq %rdx,%r10 mulq %r13 addq %rax,%r10 movq %r8,%rax adcq $0,%rdx movq %rdx,%r11 mulq %r13 addq %rax,%r11 movq %r15,%rax adcq $0,%rdx movq %rdx,%r12 mulq %r14 addq %rax,%r11 movq %r8,%rax adcq $0,%rdx movq %rdx,%rbp mulq %r14 addq %rax,%r12 movq %r8,%rax adcq $0,%rdx addq %rbp,%r12 movq %rdx,%r13 adcq $0,%r13 mulq %r15 xorq %r15,%r15 addq %rax,%r13 movq 0(%rsi),%rax movq %rdx,%r14 adcq $0,%r14 addq %r9,%r9 adcq %r10,%r10 adcq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 adcq %r14,%r14 adcq $0,%r15 mulq %rax movq %rax,%r8 movq 8(%rsi),%rax movq %rdx,%rcx mulq %rax addq %rcx,%r9 adcq %rax,%r10 movq 16(%rsi),%rax adcq $0,%rdx movq %rdx,%rcx mulq %rax addq %rcx,%r11 adcq %rax,%r12 movq 24(%rsi),%rax adcq $0,%rdx movq %rdx,%rcx mulq %rax addq %rcx,%r13 adcq %rax,%r14 movq %r8,%rax adcq %rdx,%r15 movq .Lpoly+8(%rip),%rsi movq .Lpoly+24(%rip),%rbp movq %r8,%rcx shlq $32,%r8 mulq %rbp shrq $32,%rcx addq %r8,%r9 adcq %rcx,%r10 adcq %rax,%r11 movq %r9,%rax adcq $0,%rdx movq %r9,%rcx shlq $32,%r9 movq %rdx,%r8 mulq %rbp shrq $32,%rcx addq %r9,%r10 adcq %rcx,%r11 adcq %rax,%r8 movq %r10,%rax adcq $0,%rdx movq %r10,%rcx shlq $32,%r10 movq %rdx,%r9 mulq %rbp shrq $32,%rcx addq %r10,%r11 adcq %rcx,%r8 adcq %rax,%r9 movq %r11,%rax adcq $0,%rdx movq %r11,%rcx shlq $32,%r11 movq %rdx,%r10 mulq %rbp shrq $32,%rcx addq %r11,%r8 adcq %rcx,%r9 adcq %rax,%r10 adcq $0,%rdx xorq %r11,%r11 addq %r8,%r12 adcq %r9,%r13 movq %r12,%r8 adcq %r10,%r14 adcq %rdx,%r15 movq %r13,%r9 adcq $0,%r11 subq $-1,%r12 movq %r14,%r10 sbbq %rsi,%r13 sbbq $0,%r14 movq %r15,%rcx sbbq %rbp,%r15 sbbq $0,%r11 cmovcq %r8,%r12 cmovcq %r9,%r13 movq %r12,0(%rdi) cmovcq %r10,%r14 movq %r13,8(%rdi) cmovcq %rcx,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_sqr_montq,.-__ecp_nistz256_sqr_montq #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .type __ecp_nistz256_mul_montx,@function .align 32 __ecp_nistz256_mul_montx: .cfi_startproc mulxq %r9,%r8,%r9 mulxq %r10,%rcx,%r10 movq $32,%r14 xorq %r13,%r13 mulxq %r11,%rbp,%r11 movq .Lpoly+24(%rip),%r15 adcq %rcx,%r9 mulxq %r12,%rcx,%r12 movq %r8,%rdx adcq %rbp,%r10 shlxq %r14,%r8,%rbp adcq %rcx,%r11 shrxq %r14,%r8,%rcx adcq $0,%r12 addq %rbp,%r9 adcq %rcx,%r10 mulxq 
%r15,%rcx,%rbp movq 8(%rbx),%rdx adcq %rcx,%r11 adcq %rbp,%r12 adcq $0,%r13 xorq %r8,%r8 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r9 adoxq %rbp,%r10 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 24+128(%rsi),%rcx,%rbp movq %r9,%rdx adcxq %rcx,%r12 shlxq %r14,%r9,%rcx adoxq %rbp,%r13 shrxq %r14,%r9,%rbp adcxq %r8,%r13 adoxq %r8,%r8 adcq $0,%r8 addq %rcx,%r10 adcq %rbp,%r11 mulxq %r15,%rcx,%rbp movq 16(%rbx),%rdx adcq %rcx,%r12 adcq %rbp,%r13 adcq $0,%r8 xorq %r9,%r9 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r10 adoxq %rbp,%r11 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 24+128(%rsi),%rcx,%rbp movq %r10,%rdx adcxq %rcx,%r13 shlxq %r14,%r10,%rcx adoxq %rbp,%r8 shrxq %r14,%r10,%rbp adcxq %r9,%r8 adoxq %r9,%r9 adcq $0,%r9 addq %rcx,%r11 adcq %rbp,%r12 mulxq %r15,%rcx,%rbp movq 24(%rbx),%rdx adcq %rcx,%r13 adcq %rbp,%r8 adcq $0,%r9 xorq %r10,%r10 mulxq 0+128(%rsi),%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq 8+128(%rsi),%rcx,%rbp adcxq %rcx,%r12 adoxq %rbp,%r13 mulxq 16+128(%rsi),%rcx,%rbp adcxq %rcx,%r13 adoxq %rbp,%r8 mulxq 24+128(%rsi),%rcx,%rbp movq %r11,%rdx adcxq %rcx,%r8 shlxq %r14,%r11,%rcx adoxq %rbp,%r9 shrxq %r14,%r11,%rbp adcxq %r10,%r9 adoxq %r10,%r10 adcq $0,%r10 addq %rcx,%r12 adcq %rbp,%r13 mulxq %r15,%rcx,%rbp movq %r12,%rbx movq .Lpoly+8(%rip),%r14 adcq %rcx,%r8 movq %r13,%rdx adcq %rbp,%r9 adcq $0,%r10 xorl %eax,%eax movq %r8,%rcx sbbq $-1,%r12 sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%rbp sbbq %r15,%r9 sbbq $0,%r10 cmovcq %rbx,%r12 cmovcq %rdx,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %rbp,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_mul_montx,.-__ecp_nistz256_mul_montx .type __ecp_nistz256_sqr_montx,@function .align 32 __ecp_nistz256_sqr_montx: .cfi_startproc mulxq %r14,%r9,%r10 mulxq %r15,%rcx,%r11 xorl %eax,%eax adcq %rcx,%r10 mulxq %r8,%rbp,%r12 movq %r14,%rdx adcq %rbp,%r11 adcq $0,%r12 xorq %r13,%r13 mulxq %r15,%rcx,%rbp adcxq %rcx,%r11 adoxq %rbp,%r12 mulxq %r8,%rcx,%rbp movq %r15,%rdx adcxq %rcx,%r12 adoxq %rbp,%r13 adcq $0,%r13 mulxq %r8,%rcx,%r14 movq 0+128(%rsi),%rdx xorq %r15,%r15 adcxq %r9,%r9 adoxq %rcx,%r13 adcxq %r10,%r10 adoxq %r15,%r14 mulxq %rdx,%r8,%rbp movq 8+128(%rsi),%rdx adcxq %r11,%r11 adoxq %rbp,%r9 adcxq %r12,%r12 mulxq %rdx,%rcx,%rax movq 16+128(%rsi),%rdx adcxq %r13,%r13 adoxq %rcx,%r10 adcxq %r14,%r14 .byte 0x67 mulxq %rdx,%rcx,%rbp movq 24+128(%rsi),%rdx adoxq %rax,%r11 adcxq %r15,%r15 adoxq %rcx,%r12 movq $32,%rsi adoxq %rbp,%r13 .byte 0x67,0x67 mulxq %rdx,%rcx,%rax movq .Lpoly+24(%rip),%rdx adoxq %rcx,%r14 shlxq %rsi,%r8,%rcx adoxq %rax,%r15 shrxq %rsi,%r8,%rax movq %rdx,%rbp addq %rcx,%r9 adcq %rax,%r10 mulxq %r8,%rcx,%r8 adcq %rcx,%r11 shlxq %rsi,%r9,%rcx adcq $0,%r8 shrxq %rsi,%r9,%rax addq %rcx,%r10 adcq %rax,%r11 mulxq %r9,%rcx,%r9 adcq %rcx,%r8 shlxq %rsi,%r10,%rcx adcq $0,%r9 shrxq %rsi,%r10,%rax addq %rcx,%r11 adcq %rax,%r8 mulxq %r10,%rcx,%r10 adcq %rcx,%r9 shlxq %rsi,%r11,%rcx adcq $0,%r10 shrxq %rsi,%r11,%rax addq %rcx,%r8 adcq %rax,%r9 mulxq %r11,%rcx,%r11 adcq %rcx,%r10 adcq $0,%r11 xorq %rdx,%rdx addq %r8,%r12 movq .Lpoly+8(%rip),%rsi adcq %r9,%r13 movq %r12,%r8 adcq %r10,%r14 adcq %r11,%r15 movq %r13,%r9 adcq $0,%rdx subq $-1,%r12 movq %r14,%r10 sbbq %rsi,%r13 sbbq $0,%r14 movq %r15,%r11 sbbq %rbp,%r15 sbbq $0,%rdx cmovcq %r8,%r12 cmovcq %r9,%r13 movq %r12,0(%rdi) cmovcq %r10,%r14 movq %r13,8(%rdi) cmovcq 
%r11,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_sqr_montx,.-__ecp_nistz256_sqr_montx #endif .globl ecp_nistz256_select_w5 .hidden ecp_nistz256_select_w5 .type ecp_nistz256_select_w5,@function .align 32 ecp_nistz256_select_w5: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rax movq 8(%rax),%rax testl $32,%eax jnz .Lavx2_select_w5 #endif movdqa .LOne(%rip),%xmm0 movd %edx,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movdqa %xmm0,%xmm8 pshufd $0,%xmm1,%xmm1 movq $16,%rax .Lselect_loop_sse_w5: movdqa %xmm8,%xmm15 paddd %xmm0,%xmm8 pcmpeqd %xmm1,%xmm15 movdqa 0(%rsi),%xmm9 movdqa 16(%rsi),%xmm10 movdqa 32(%rsi),%xmm11 movdqa 48(%rsi),%xmm12 movdqa 64(%rsi),%xmm13 movdqa 80(%rsi),%xmm14 leaq 96(%rsi),%rsi pand %xmm15,%xmm9 pand %xmm15,%xmm10 por %xmm9,%xmm2 pand %xmm15,%xmm11 por %xmm10,%xmm3 pand %xmm15,%xmm12 por %xmm11,%xmm4 pand %xmm15,%xmm13 por %xmm12,%xmm5 pand %xmm15,%xmm14 por %xmm13,%xmm6 por %xmm14,%xmm7 decq %rax jnz .Lselect_loop_sse_w5 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqu %xmm4,32(%rdi) movdqu %xmm5,48(%rdi) movdqu %xmm6,64(%rdi) movdqu %xmm7,80(%rdi) .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_ecp_nistz256_select_w5: .size ecp_nistz256_select_w5,.-ecp_nistz256_select_w5 .globl ecp_nistz256_select_w7 .hidden ecp_nistz256_select_w7 .type ecp_nistz256_select_w7,@function .align 32 ecp_nistz256_select_w7: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rax movq 8(%rax),%rax testl $32,%eax jnz .Lavx2_select_w7 #endif movdqa .LOne(%rip),%xmm8 movd %edx,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 movdqa %xmm8,%xmm0 pshufd $0,%xmm1,%xmm1 movq $64,%rax .Lselect_loop_sse_w7: movdqa %xmm8,%xmm15 paddd %xmm0,%xmm8 movdqa 0(%rsi),%xmm9 movdqa 16(%rsi),%xmm10 pcmpeqd %xmm1,%xmm15 movdqa 32(%rsi),%xmm11 movdqa 48(%rsi),%xmm12 leaq 64(%rsi),%rsi pand %xmm15,%xmm9 pand %xmm15,%xmm10 por %xmm9,%xmm2 pand %xmm15,%xmm11 por %xmm10,%xmm3 pand %xmm15,%xmm12 por %xmm11,%xmm4 prefetcht0 255(%rsi) por %xmm12,%xmm5 decq %rax jnz .Lselect_loop_sse_w7 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqu %xmm4,32(%rdi) movdqu %xmm5,48(%rdi) .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_ecp_nistz256_select_w7: .size ecp_nistz256_select_w7,.-ecp_nistz256_select_w7 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .type ecp_nistz256_avx2_select_w5,@function .align 32 ecp_nistz256_avx2_select_w5: .cfi_startproc .Lavx2_select_w5: vzeroupper vmovdqa .LTwo(%rip),%ymm0 vpxor %ymm2,%ymm2,%ymm2 vpxor %ymm3,%ymm3,%ymm3 vpxor %ymm4,%ymm4,%ymm4 vmovdqa .LOne(%rip),%ymm5 vmovdqa .LTwo(%rip),%ymm10 vmovd %edx,%xmm1 vpermd %ymm1,%ymm2,%ymm1 movq $8,%rax .Lselect_loop_avx2_w5: vmovdqa 0(%rsi),%ymm6 vmovdqa 32(%rsi),%ymm7 vmovdqa 64(%rsi),%ymm8 vmovdqa 96(%rsi),%ymm11 vmovdqa 128(%rsi),%ymm12 vmovdqa 160(%rsi),%ymm13 vpcmpeqd %ymm1,%ymm5,%ymm9 vpcmpeqd %ymm1,%ymm10,%ymm14 vpaddd %ymm0,%ymm5,%ymm5 vpaddd %ymm0,%ymm10,%ymm10 leaq 192(%rsi),%rsi vpand %ymm9,%ymm6,%ymm6 vpand %ymm9,%ymm7,%ymm7 vpand %ymm9,%ymm8,%ymm8 vpand %ymm14,%ymm11,%ymm11 vpand %ymm14,%ymm12,%ymm12 vpand %ymm14,%ymm13,%ymm13 vpxor %ymm6,%ymm2,%ymm2 vpxor %ymm7,%ymm3,%ymm3 vpxor %ymm8,%ymm4,%ymm4 vpxor %ymm11,%ymm2,%ymm2 vpxor %ymm12,%ymm3,%ymm3 vpxor %ymm13,%ymm4,%ymm4 decq %rax jnz .Lselect_loop_avx2_w5 vmovdqu %ymm2,0(%rdi) vmovdqu %ymm3,32(%rdi) vmovdqu %ymm4,64(%rdi) vzeroupper .byte 0xf3,0xc3 .cfi_endproc 
.LSEH_end_ecp_nistz256_avx2_select_w5: .size ecp_nistz256_avx2_select_w5,.-ecp_nistz256_avx2_select_w5 #endif #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .globl ecp_nistz256_avx2_select_w7 .hidden ecp_nistz256_avx2_select_w7 .type ecp_nistz256_avx2_select_w7,@function .align 32 ecp_nistz256_avx2_select_w7: .cfi_startproc .Lavx2_select_w7: _CET_ENDBR vzeroupper vmovdqa .LThree(%rip),%ymm0 vpxor %ymm2,%ymm2,%ymm2 vpxor %ymm3,%ymm3,%ymm3 vmovdqa .LOne(%rip),%ymm4 vmovdqa .LTwo(%rip),%ymm8 vmovdqa .LThree(%rip),%ymm12 vmovd %edx,%xmm1 vpermd %ymm1,%ymm2,%ymm1 movq $21,%rax .Lselect_loop_avx2_w7: vmovdqa 0(%rsi),%ymm5 vmovdqa 32(%rsi),%ymm6 vmovdqa 64(%rsi),%ymm9 vmovdqa 96(%rsi),%ymm10 vmovdqa 128(%rsi),%ymm13 vmovdqa 160(%rsi),%ymm14 vpcmpeqd %ymm1,%ymm4,%ymm7 vpcmpeqd %ymm1,%ymm8,%ymm11 vpcmpeqd %ymm1,%ymm12,%ymm15 vpaddd %ymm0,%ymm4,%ymm4 vpaddd %ymm0,%ymm8,%ymm8 vpaddd %ymm0,%ymm12,%ymm12 leaq 192(%rsi),%rsi vpand %ymm7,%ymm5,%ymm5 vpand %ymm7,%ymm6,%ymm6 vpand %ymm11,%ymm9,%ymm9 vpand %ymm11,%ymm10,%ymm10 vpand %ymm15,%ymm13,%ymm13 vpand %ymm15,%ymm14,%ymm14 vpxor %ymm5,%ymm2,%ymm2 vpxor %ymm6,%ymm3,%ymm3 vpxor %ymm9,%ymm2,%ymm2 vpxor %ymm10,%ymm3,%ymm3 vpxor %ymm13,%ymm2,%ymm2 vpxor %ymm14,%ymm3,%ymm3 decq %rax jnz .Lselect_loop_avx2_w7 vmovdqa 0(%rsi),%ymm5 vmovdqa 32(%rsi),%ymm6 vpcmpeqd %ymm1,%ymm4,%ymm7 vpand %ymm7,%ymm5,%ymm5 vpand %ymm7,%ymm6,%ymm6 vpxor %ymm5,%ymm2,%ymm2 vpxor %ymm6,%ymm3,%ymm3 vmovdqu %ymm2,0(%rdi) vmovdqu %ymm3,32(%rdi) vzeroupper .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_ecp_nistz256_avx2_select_w7: .size ecp_nistz256_avx2_select_w7,.-ecp_nistz256_avx2_select_w7 #endif .type __ecp_nistz256_add_toq,@function .align 32 __ecp_nistz256_add_toq: .cfi_startproc xorq %r11,%r11 addq 0(%rbx),%r12 adcq 8(%rbx),%r13 movq %r12,%rax adcq 16(%rbx),%r8 adcq 24(%rbx),%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_add_toq,.-__ecp_nistz256_add_toq .type __ecp_nistz256_sub_fromq,@function .align 32 __ecp_nistz256_sub_fromq: .cfi_startproc subq 0(%rbx),%r12 sbbq 8(%rbx),%r13 movq %r12,%rax sbbq 16(%rbx),%r8 sbbq 24(%rbx),%r9 movq %r13,%rbp sbbq %r11,%r11 addq $-1,%r12 movq %r8,%rcx adcq %r14,%r13 adcq $0,%r8 movq %r9,%r10 adcq %r15,%r9 testq %r11,%r11 cmovzq %rax,%r12 cmovzq %rbp,%r13 movq %r12,0(%rdi) cmovzq %rcx,%r8 movq %r13,8(%rdi) cmovzq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_sub_fromq,.-__ecp_nistz256_sub_fromq .type __ecp_nistz256_subq,@function .align 32 __ecp_nistz256_subq: .cfi_startproc subq %r12,%rax sbbq %r13,%rbp movq %rax,%r12 sbbq %r8,%rcx sbbq %r9,%r10 movq %rbp,%r13 sbbq %r11,%r11 addq $-1,%rax movq %rcx,%r8 adcq %r14,%rbp adcq $0,%rcx movq %r10,%r9 adcq %r15,%r10 testq %r11,%r11 cmovnzq %rax,%r12 cmovnzq %rbp,%r13 cmovnzq %rcx,%r8 cmovnzq %r10,%r9 .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_subq,.-__ecp_nistz256_subq .type __ecp_nistz256_mul_by_2q,@function .align 32 __ecp_nistz256_mul_by_2q: .cfi_startproc xorq %r11,%r11 addq %r12,%r12 adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq 
%r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_mul_by_2q,.-__ecp_nistz256_mul_by_2q .globl ecp_nistz256_point_double .hidden ecp_nistz256_point_double .type ecp_nistz256_point_double,@function .align 32 ecp_nistz256_point_double: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je .Lpoint_doublex #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 subq $160+8,%rsp .cfi_adjust_cfa_offset 32*5+8 .Lpoint_doubleq_body: .Lpoint_double_shortcutq: movdqu 0(%rsi),%xmm0 movq %rsi,%rbx movdqu 16(%rsi),%xmm1 movq 32+0(%rsi),%r12 movq 32+8(%rsi),%r13 movq 32+16(%rsi),%r8 movq 32+24(%rsi),%r9 movq .Lpoly+8(%rip),%r14 movq .Lpoly+24(%rip),%r15 movdqa %xmm0,96(%rsp) movdqa %xmm1,96+16(%rsp) leaq 32(%rdi),%r10 leaq 64(%rdi),%r11 .byte 102,72,15,110,199 .byte 102,73,15,110,202 .byte 102,73,15,110,211 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_by_2q movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 leaq 64-0(%rsi),%rsi leaq 64(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 0(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 32(%rbx),%rax movq 64+0(%rbx),%r9 movq 64+8(%rbx),%r10 movq 64+16(%rbx),%r11 movq 64+24(%rbx),%r12 leaq 64-0(%rbx),%rsi leaq 32(%rbx),%rbx .byte 102,72,15,126,215 call __ecp_nistz256_mul_montq call __ecp_nistz256_mul_by_2q movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 32(%rsp),%rdi call __ecp_nistz256_add_toq movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 .byte 102,72,15,126,207 call __ecp_nistz256_sqr_montq xorq %r9,%r9 movq %r12,%rax addq $-1,%r12 movq %r13,%r10 adcq %rsi,%r13 movq %r14,%rcx adcq $0,%r14 movq %r15,%r8 adcq %rbp,%r15 adcq $0,%r9 xorq %rsi,%rsi testq $1,%rax cmovzq %rax,%r12 cmovzq %r10,%r13 cmovzq %rcx,%r14 cmovzq %r8,%r15 cmovzq %rsi,%r9 movq %r13,%rax shrq $1,%r12 shlq $63,%rax movq %r14,%r10 shrq $1,%r13 orq %rax,%r12 shlq $63,%r10 movq %r15,%rcx shrq $1,%r14 orq %r10,%r13 shlq $63,%rcx movq %r12,0(%rdi) shrq $1,%r15 movq %r13,8(%rdi) shlq $63,%r9 orq %rcx,%r14 orq %r9,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) movq 64(%rsp),%rax leaq 64(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2q leaq 32(%rsp),%rbx leaq 32(%rsp),%rdi call __ecp_nistz256_add_toq movq 96(%rsp),%rax leaq 96(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2q movq 0+32(%rsp),%rax movq 8+32(%rsp),%r14 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r15 movq 24+32(%rsp),%r8 .byte 102,72,15,126,199 call __ecp_nistz256_sqr_montq leaq 128(%rsp),%rbx movq %r14,%r8 movq %r15,%r9 movq 
%rsi,%r14 movq %rbp,%r15 call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 0(%rsp),%rdi call __ecp_nistz256_subq movq 32(%rsp),%rax leaq 32(%rsp),%rbx movq %r12,%r14 xorl %ecx,%ecx movq %r12,0+0(%rsp) movq %r13,%r10 movq %r13,0+8(%rsp) cmovzq %r8,%r11 movq %r8,0+16(%rsp) leaq 0-0(%rsp),%rsi cmovzq %r9,%r12 movq %r9,0+24(%rsp) movq %r14,%r9 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq .byte 102,72,15,126,203 .byte 102,72,15,126,207 call __ecp_nistz256_sub_fromq leaq 160+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx .cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpoint_doubleq_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_double,.-ecp_nistz256_point_double .globl ecp_nistz256_point_add .hidden ecp_nistz256_point_add .type ecp_nistz256_point_add,@function .align 32 ecp_nistz256_point_add: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je .Lpoint_addx #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 subq $576+8,%rsp .cfi_adjust_cfa_offset 32*18+8 .Lpoint_addq_body: movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq %rsi,%rbx movq %rdx,%rsi movdqa %xmm0,384(%rsp) movdqa %xmm1,384+16(%rsp) movdqa %xmm2,416(%rsp) movdqa %xmm3,416+16(%rsp) movdqa %xmm4,448(%rsp) movdqa %xmm5,448+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rsi),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 por %xmm3,%xmm5 movdqu 48(%rsi),%xmm3 movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,480(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,480+16(%rsp) movdqu 64(%rsi),%xmm0 movdqu 80(%rsi),%xmm1 movdqa %xmm2,512(%rsp) movdqa %xmm3,512+16(%rsp) por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm0,%xmm1 .byte 102,72,15,110,199 leaq 64-0(%rsi),%rsi movq %rax,544+0(%rsp) movq %r14,544+8(%rsp) movq %r15,544+16(%rsp) movq %r8,544+24(%rsp) leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montq pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm1,%xmm4 por %xmm1,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 por %xmm3,%xmm4 pxor %xmm3,%xmm3 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 movq 64+0(%rbx),%rax movq 64+8(%rbx),%r14 movq 64+16(%rbx),%r15 movq 64+24(%rbx),%r8 .byte 102,72,15,110,203 leaq 64-0(%rbx),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 544(%rsp),%rax leaq 544(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq movq 416(%rsp),%rax leaq 416(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq 0+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 224(%rsp),%rdi call 
__ecp_nistz256_mul_montq movq 512(%rsp),%rax leaq 512(%rsp),%rbx movq 0+256(%rsp),%r9 movq 8+256(%rsp),%r10 leaq 0+256(%rsp),%rsi movq 16+256(%rsp),%r11 movq 24+256(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 224(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq orq %r13,%r12 movdqa %xmm4,%xmm2 orq %r8,%r12 orq %r9,%r12 por %xmm5,%xmm2 .byte 102,73,15,110,220 movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montq movq 480(%rsp),%rax leaq 480(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 160(%rsp),%rbx leaq 0(%rsp),%rdi call __ecp_nistz256_sub_fromq orq %r13,%r12 orq %r8,%r12 orq %r9,%r12 .byte 102,73,15,126,208 .byte 102,73,15,126,217 orq %r8,%r12 .byte 0x3e jnz .Ladd_proceedq testq %r9,%r9 jz .Ladd_doubleq .byte 102,72,15,126,199 pxor %xmm0,%xmm0 movdqu %xmm0,0(%rdi) movdqu %xmm0,16(%rdi) movdqu %xmm0,32(%rdi) movdqu %xmm0,48(%rdi) movdqu %xmm0,64(%rdi) movdqu %xmm0,80(%rdi) jmp .Ladd_doneq .align 32 .Ladd_doubleq: .byte 102,72,15,126,206 .byte 102,72,15,126,199 addq $416,%rsp .cfi_adjust_cfa_offset -416 jmp .Lpoint_double_shortcutq .cfi_adjust_cfa_offset 416 .align 32 .Ladd_proceedq: movq 0+64(%rsp),%rax movq 8+64(%rsp),%r14 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montq movq 0+0(%rsp),%rax movq 8+0(%rsp),%r14 leaq 0+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 544(%rsp),%rax leaq 544(%rsp),%rbx movq 0+352(%rsp),%r9 movq 8+352(%rsp),%r10 leaq 0+352(%rsp),%rsi movq 16+352(%rsp),%r11 movq 24+352(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montq movq 0(%rsp),%rax leaq 0(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 128(%rsp),%rdi call __ecp_nistz256_mul_montq movq 160(%rsp),%rax leaq 160(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montq xorq %r11,%r11 addq %r12,%r12 leaq 96(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subq leaq 128(%rsp),%rbx leaq 288(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 192+0(%rsp),%rax movq 192+8(%rsp),%rbp movq 192+16(%rsp),%rcx movq 192+24(%rsp),%r10 leaq 320(%rsp),%rdi call __ecp_nistz256_subq movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 128(%rsp),%rax leaq 128(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq 0+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montq movq 320(%rsp),%rax leaq 320(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 320(%rsp),%rdi call __ecp_nistz256_mul_montq 
leaq 256(%rsp),%rbx leaq 320(%rsp),%rdi call __ecp_nistz256_sub_fromq .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 352(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 352+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 544(%rsp),%xmm2 pand 544+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 480(%rsp),%xmm2 pand 480+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 320(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 320+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 512(%rsp),%xmm2 pand 512+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) .Ladd_doneq: leaq 576+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx .cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpoint_addq_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_add,.-ecp_nistz256_point_add .globl ecp_nistz256_point_add_affine .hidden ecp_nistz256_point_add_affine .type ecp_nistz256_point_add_affine,@function .align 32 ecp_nistz256_point_add_affine: .cfi_startproc _CET_ENDBR #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX leaq OPENSSL_ia32cap_P(%rip),%rcx movq 8(%rcx),%rcx andl $0x80100,%ecx cmpl $0x80100,%ecx je .Lpoint_add_affinex #endif pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 subq $480+8,%rsp .cfi_adjust_cfa_offset 32*15+8 .Ladd_affineq_body: movdqu 0(%rsi),%xmm0 movq %rdx,%rbx movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq 64+0(%rsi),%rax movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,320(%rsp) movdqa %xmm1,320+16(%rsp) movdqa %xmm2,352(%rsp) movdqa %xmm3,352+16(%rsp) movdqa %xmm4,384(%rsp) movdqa %xmm5,384+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rbx),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rbx),%xmm1 movdqu 32(%rbx),%xmm2 por %xmm3,%xmm5 movdqu 48(%rbx),%xmm3 movdqa %xmm0,416(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,416+16(%rsp) por %xmm0,%xmm1 .byte 102,72,15,110,199 movdqa %xmm2,448(%rsp) movdqa %xmm3,448+16(%rsp) por %xmm2,%xmm3 por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm1,%xmm3 leaq 64-0(%rsi),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montq pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm3,%xmm4 movq 0(%rbx),%rax movq %r12,%r9 por %xmm3,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 movq 
%r13,%r10 por %xmm3,%xmm4 pxor %xmm3,%xmm3 movq %r14,%r11 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 leaq 32-0(%rsp),%rsi movq %r15,%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 320(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq movq 384(%rsp),%rax leaq 384(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 288(%rsp),%rdi call __ecp_nistz256_mul_montq movq 448(%rsp),%rax leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq 0+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 352(%rsp),%rbx leaq 96(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+64(%rsp),%rax movq 8+64(%rsp),%r14 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 128(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 0+96(%rsp),%rax movq 8+96(%rsp),%r14 leaq 0+96(%rsp),%rsi movq 16+96(%rsp),%r15 movq 24+96(%rsp),%r8 leaq 192(%rsp),%rdi call __ecp_nistz256_sqr_montq movq 128(%rsp),%rax leaq 128(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montq movq 320(%rsp),%rax leaq 320(%rsp),%rbx movq 0+128(%rsp),%r9 movq 8+128(%rsp),%r10 leaq 0+128(%rsp),%rsi movq 16+128(%rsp),%r11 movq 24+128(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montq xorq %r11,%r11 addq %r12,%r12 leaq 192(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subq leaq 160(%rsp),%rbx leaq 224(%rsp),%rdi call __ecp_nistz256_sub_fromq movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 64(%rsp),%rdi call __ecp_nistz256_subq movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 352(%rsp),%rax leaq 352(%rsp),%rbx movq 0+160(%rsp),%r9 movq 8+160(%rsp),%r10 leaq 0+160(%rsp),%rsi movq 16+160(%rsp),%r11 movq 24+160(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montq movq 96(%rsp),%rax leaq 96(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq 0+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 64(%rsp),%rdi call __ecp_nistz256_mul_montq leaq 32(%rsp),%rbx leaq 256(%rsp),%rdi call __ecp_nistz256_sub_fromq .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand .LONE_mont(%rip),%xmm2 pand .LONE_mont+16(%rip),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 224(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 224+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 320(%rsp),%xmm2 pand 
320+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 256(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 256+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 352(%rsp),%xmm2 pand 352+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 480+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx .cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Ladd_affineq_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .type __ecp_nistz256_add_tox,@function .align 32 __ecp_nistz256_add_tox: .cfi_startproc xorq %r11,%r11 adcq 0(%rbx),%r12 adcq 8(%rbx),%r13 movq %r12,%rax adcq 16(%rbx),%r8 adcq 24(%rbx),%r9 movq %r13,%rbp adcq $0,%r11 xorq %r10,%r10 sbbq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_add_tox,.-__ecp_nistz256_add_tox .type __ecp_nistz256_sub_fromx,@function .align 32 __ecp_nistz256_sub_fromx: .cfi_startproc xorq %r11,%r11 sbbq 0(%rbx),%r12 sbbq 8(%rbx),%r13 movq %r12,%rax sbbq 16(%rbx),%r8 sbbq 24(%rbx),%r9 movq %r13,%rbp sbbq $0,%r11 xorq %r10,%r10 adcq $-1,%r12 movq %r8,%rcx adcq %r14,%r13 adcq $0,%r8 movq %r9,%r10 adcq %r15,%r9 btq $0,%r11 cmovncq %rax,%r12 cmovncq %rbp,%r13 movq %r12,0(%rdi) cmovncq %rcx,%r8 movq %r13,8(%rdi) cmovncq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_sub_fromx,.-__ecp_nistz256_sub_fromx .type __ecp_nistz256_subx,@function .align 32 __ecp_nistz256_subx: .cfi_startproc xorq %r11,%r11 sbbq %r12,%rax sbbq %r13,%rbp movq %rax,%r12 sbbq %r8,%rcx sbbq %r9,%r10 movq %rbp,%r13 sbbq $0,%r11 xorq %r9,%r9 adcq $-1,%rax movq %rcx,%r8 adcq %r14,%rbp adcq $0,%rcx movq %r10,%r9 adcq %r15,%r10 btq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 cmovcq %rcx,%r8 cmovcq %r10,%r9 .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_subx,.-__ecp_nistz256_subx .type __ecp_nistz256_mul_by_2x,@function .align 32 __ecp_nistz256_mul_by_2x: .cfi_startproc xorq %r11,%r11 adcq %r12,%r12 adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 xorq %r10,%r10 sbbq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 cmovcq %rbp,%r13 movq %r12,0(%rdi) cmovcq %rcx,%r8 movq %r13,8(%rdi) cmovcq %r10,%r9 movq %r8,16(%rdi) movq %r9,24(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size __ecp_nistz256_mul_by_2x,.-__ecp_nistz256_mul_by_2x .type ecp_nistz256_point_doublex,@function .align 32 ecp_nistz256_point_doublex: .cfi_startproc .Lpoint_doublex: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset 
%r15,-56 subq $160+8,%rsp .cfi_adjust_cfa_offset 32*5+8 .Lpoint_doublex_body: .Lpoint_double_shortcutx: movdqu 0(%rsi),%xmm0 movq %rsi,%rbx movdqu 16(%rsi),%xmm1 movq 32+0(%rsi),%r12 movq 32+8(%rsi),%r13 movq 32+16(%rsi),%r8 movq 32+24(%rsi),%r9 movq .Lpoly+8(%rip),%r14 movq .Lpoly+24(%rip),%r15 movdqa %xmm0,96(%rsp) movdqa %xmm1,96+16(%rsp) leaq 32(%rdi),%r10 leaq 64(%rdi),%r11 .byte 102,72,15,110,199 .byte 102,73,15,110,202 .byte 102,73,15,110,211 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_by_2x movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 leaq 64-128(%rsi),%rsi leaq 64(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 0(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 32(%rbx),%rdx movq 64+0(%rbx),%r9 movq 64+8(%rbx),%r10 movq 64+16(%rbx),%r11 movq 64+24(%rbx),%r12 leaq 64-128(%rbx),%rsi leaq 32(%rbx),%rbx .byte 102,72,15,126,215 call __ecp_nistz256_mul_montx call __ecp_nistz256_mul_by_2x movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 32(%rsp),%rdi call __ecp_nistz256_add_tox movq 96+0(%rsp),%r12 movq 96+8(%rsp),%r13 leaq 64(%rsp),%rbx movq 96+16(%rsp),%r8 movq 96+24(%rsp),%r9 leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 .byte 102,72,15,126,207 call __ecp_nistz256_sqr_montx xorq %r9,%r9 movq %r12,%rax addq $-1,%r12 movq %r13,%r10 adcq %rsi,%r13 movq %r14,%rcx adcq $0,%r14 movq %r15,%r8 adcq %rbp,%r15 adcq $0,%r9 xorq %rsi,%rsi testq $1,%rax cmovzq %rax,%r12 cmovzq %r10,%r13 cmovzq %rcx,%r14 cmovzq %r8,%r15 cmovzq %rsi,%r9 movq %r13,%rax shrq $1,%r12 shlq $63,%rax movq %r14,%r10 shrq $1,%r13 orq %rax,%r12 shlq $63,%r10 movq %r15,%rcx shrq $1,%r14 orq %r10,%r13 shlq $63,%rcx movq %r12,0(%rdi) shrq $1,%r15 movq %r13,8(%rdi) shlq $63,%r9 orq %rcx,%r14 orq %r9,%r15 movq %r14,16(%rdi) movq %r15,24(%rdi) movq 64(%rsp),%rdx leaq 64(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2x leaq 32(%rsp),%rbx leaq 32(%rsp),%rdi call __ecp_nistz256_add_tox movq 96(%rsp),%rdx leaq 96(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 128(%rsp),%rdi call __ecp_nistz256_mul_by_2x movq 0+32(%rsp),%rdx movq 8+32(%rsp),%r14 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r15 movq 24+32(%rsp),%r8 .byte 102,72,15,126,199 call __ecp_nistz256_sqr_montx leaq 128(%rsp),%rbx movq %r14,%r8 movq %r15,%r9 movq %rsi,%r14 movq %rbp,%r15 call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 0(%rsp),%rdi call __ecp_nistz256_subx movq 32(%rsp),%rdx leaq 32(%rsp),%rbx movq %r12,%r14 xorl %ecx,%ecx movq %r12,0+0(%rsp) movq %r13,%r10 movq %r13,0+8(%rsp) cmovzq %r8,%r11 movq %r8,0+16(%rsp) leaq 0-128(%rsp),%rsi cmovzq %r9,%r12 movq %r9,0+24(%rsp) movq %r14,%r9 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx .byte 102,72,15,126,203 .byte 102,72,15,126,207 call __ecp_nistz256_sub_fromx leaq 160+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx 
.cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpoint_doublex_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_doublex,.-ecp_nistz256_point_doublex .type ecp_nistz256_point_addx,@function .align 32 ecp_nistz256_point_addx: .cfi_startproc .Lpoint_addx: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 subq $576+8,%rsp .cfi_adjust_cfa_offset 32*18+8 .Lpoint_addx_body: movdqu 0(%rsi),%xmm0 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq %rsi,%rbx movq %rdx,%rsi movdqa %xmm0,384(%rsp) movdqa %xmm1,384+16(%rsp) movdqa %xmm2,416(%rsp) movdqa %xmm3,416+16(%rsp) movdqa %xmm4,448(%rsp) movdqa %xmm5,448+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rsi),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 por %xmm3,%xmm5 movdqu 48(%rsi),%xmm3 movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,480(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,480+16(%rsp) movdqu 64(%rsi),%xmm0 movdqu 80(%rsi),%xmm1 movdqa %xmm2,512(%rsp) movdqa %xmm3,512+16(%rsp) por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm0,%xmm1 .byte 102,72,15,110,199 leaq 64-128(%rsi),%rsi movq %rdx,544+0(%rsp) movq %r14,544+8(%rsp) movq %r15,544+16(%rsp) movq %r8,544+24(%rsp) leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montx pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm1,%xmm4 por %xmm1,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 por %xmm3,%xmm4 pxor %xmm3,%xmm3 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 movq 64+0(%rbx),%rdx movq 64+8(%rbx),%r14 movq 64+16(%rbx),%r15 movq 64+24(%rbx),%r8 .byte 102,72,15,110,203 leaq 64-128(%rbx),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 544(%rsp),%rdx leaq 544(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq -128+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx movq 416(%rsp),%rdx leaq 416(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq -128+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 224(%rsp),%rdi call __ecp_nistz256_mul_montx movq 512(%rsp),%rdx leaq 512(%rsp),%rbx movq 0+256(%rsp),%r9 movq 8+256(%rsp),%r10 leaq -128+256(%rsp),%rsi movq 16+256(%rsp),%r11 movq 24+256(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 224(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx orq %r13,%r12 movdqa %xmm4,%xmm2 orq %r8,%r12 orq %r9,%r12 por %xmm5,%xmm2 .byte 102,73,15,110,220 movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+96(%rsp),%r9 movq 8+96(%rsp),%r10 leaq -128+96(%rsp),%rsi movq 16+96(%rsp),%r11 movq 24+96(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montx movq 480(%rsp),%rdx leaq 480(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 160(%rsp),%rbx leaq 0(%rsp),%rdi call __ecp_nistz256_sub_fromx orq %r13,%r12 orq %r8,%r12 orq %r9,%r12 .byte 102,73,15,126,208 .byte 
102,73,15,126,217 orq %r8,%r12 .byte 0x3e jnz .Ladd_proceedx testq %r9,%r9 jz .Ladd_doublex .byte 102,72,15,126,199 pxor %xmm0,%xmm0 movdqu %xmm0,0(%rdi) movdqu %xmm0,16(%rdi) movdqu %xmm0,32(%rdi) movdqu %xmm0,48(%rdi) movdqu %xmm0,64(%rdi) movdqu %xmm0,80(%rdi) jmp .Ladd_donex .align 32 .Ladd_doublex: .byte 102,72,15,126,206 .byte 102,72,15,126,199 addq $416,%rsp .cfi_adjust_cfa_offset -416 jmp .Lpoint_double_shortcutx .cfi_adjust_cfa_offset 416 .align 32 .Ladd_proceedx: movq 0+64(%rsp),%rdx movq 8+64(%rsp),%r14 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 96(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+0(%rsp),%r9 movq 8+0(%rsp),%r10 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r11 movq 24+0(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montx movq 0+0(%rsp),%rdx movq 8+0(%rsp),%r14 leaq -128+0(%rsp),%rsi movq 16+0(%rsp),%r15 movq 24+0(%rsp),%r8 leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 544(%rsp),%rdx leaq 544(%rsp),%rbx movq 0+352(%rsp),%r9 movq 8+352(%rsp),%r10 leaq -128+352(%rsp),%rsi movq 16+352(%rsp),%r11 movq 24+352(%rsp),%r12 leaq 352(%rsp),%rdi call __ecp_nistz256_mul_montx movq 0(%rsp),%rdx leaq 0(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 128(%rsp),%rdi call __ecp_nistz256_mul_montx movq 160(%rsp),%rdx leaq 160(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 192(%rsp),%rdi call __ecp_nistz256_mul_montx xorq %r11,%r11 addq %r12,%r12 leaq 96(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subx leaq 128(%rsp),%rbx leaq 288(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 192+0(%rsp),%rax movq 192+8(%rsp),%rbp movq 192+16(%rsp),%rcx movq 192+24(%rsp),%r10 leaq 320(%rsp),%rdi call __ecp_nistz256_subx movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 128(%rsp),%rdx leaq 128(%rsp),%rbx movq 0+224(%rsp),%r9 movq 8+224(%rsp),%r10 leaq -128+224(%rsp),%rsi movq 16+224(%rsp),%r11 movq 24+224(%rsp),%r12 leaq 256(%rsp),%rdi call __ecp_nistz256_mul_montx movq 320(%rsp),%rdx leaq 320(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 320(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 256(%rsp),%rbx leaq 320(%rsp),%rdi call __ecp_nistz256_sub_fromx .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 352(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 352+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 544(%rsp),%xmm2 pand 544+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 480(%rsp),%xmm2 pand 480+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 
por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 320(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 320+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 512(%rsp),%xmm2 pand 512+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) .Ladd_donex: leaq 576+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx .cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lpoint_addx_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_addx,.-ecp_nistz256_point_addx .type ecp_nistz256_point_add_affinex,@function .align 32 ecp_nistz256_point_add_affinex: .cfi_startproc .Lpoint_add_affinex: pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 subq $480+8,%rsp .cfi_adjust_cfa_offset 32*15+8 .Ladd_affinex_body: movdqu 0(%rsi),%xmm0 movq %rdx,%rbx movdqu 16(%rsi),%xmm1 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm3 movdqu 64(%rsi),%xmm4 movdqu 80(%rsi),%xmm5 movq 64+0(%rsi),%rdx movq 64+8(%rsi),%r14 movq 64+16(%rsi),%r15 movq 64+24(%rsi),%r8 movdqa %xmm0,320(%rsp) movdqa %xmm1,320+16(%rsp) movdqa %xmm2,352(%rsp) movdqa %xmm3,352+16(%rsp) movdqa %xmm4,384(%rsp) movdqa %xmm5,384+16(%rsp) por %xmm4,%xmm5 movdqu 0(%rbx),%xmm0 pshufd $0xb1,%xmm5,%xmm3 movdqu 16(%rbx),%xmm1 movdqu 32(%rbx),%xmm2 por %xmm3,%xmm5 movdqu 48(%rbx),%xmm3 movdqa %xmm0,416(%rsp) pshufd $0x1e,%xmm5,%xmm4 movdqa %xmm1,416+16(%rsp) por %xmm0,%xmm1 .byte 102,72,15,110,199 movdqa %xmm2,448(%rsp) movdqa %xmm3,448+16(%rsp) por %xmm2,%xmm3 por %xmm4,%xmm5 pxor %xmm4,%xmm4 por %xmm1,%xmm3 leaq 64-128(%rsi),%rsi leaq 32(%rsp),%rdi call __ecp_nistz256_sqr_montx pcmpeqd %xmm4,%xmm5 pshufd $0xb1,%xmm3,%xmm4 movq 0(%rbx),%rdx movq %r12,%r9 por %xmm3,%xmm4 pshufd $0,%xmm5,%xmm5 pshufd $0x1e,%xmm4,%xmm3 movq %r13,%r10 por %xmm3,%xmm4 pxor %xmm3,%xmm3 movq %r14,%r11 pcmpeqd %xmm3,%xmm4 pshufd $0,%xmm4,%xmm4 leaq 32-128(%rsp),%rsi movq %r15,%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 320(%rsp),%rbx leaq 64(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx movq 384(%rsp),%rdx leaq 384(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 288(%rsp),%rdi call __ecp_nistz256_mul_montx movq 448(%rsp),%rdx leaq 448(%rsp),%rbx movq 0+32(%rsp),%r9 movq 8+32(%rsp),%r10 leaq -128+32(%rsp),%rsi movq 16+32(%rsp),%r11 movq 24+32(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 352(%rsp),%rbx leaq 96(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+64(%rsp),%rdx movq 8+64(%rsp),%r14 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r15 movq 24+64(%rsp),%r8 leaq 128(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 0+96(%rsp),%rdx movq 8+96(%rsp),%r14 leaq 
-128+96(%rsp),%rsi movq 16+96(%rsp),%r15 movq 24+96(%rsp),%r8 leaq 192(%rsp),%rdi call __ecp_nistz256_sqr_montx movq 128(%rsp),%rdx leaq 128(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 160(%rsp),%rdi call __ecp_nistz256_mul_montx movq 320(%rsp),%rdx leaq 320(%rsp),%rbx movq 0+128(%rsp),%r9 movq 8+128(%rsp),%r10 leaq -128+128(%rsp),%rsi movq 16+128(%rsp),%r11 movq 24+128(%rsp),%r12 leaq 0(%rsp),%rdi call __ecp_nistz256_mul_montx xorq %r11,%r11 addq %r12,%r12 leaq 192(%rsp),%rsi adcq %r13,%r13 movq %r12,%rax adcq %r8,%r8 adcq %r9,%r9 movq %r13,%rbp adcq $0,%r11 subq $-1,%r12 movq %r8,%rcx sbbq %r14,%r13 sbbq $0,%r8 movq %r9,%r10 sbbq %r15,%r9 sbbq $0,%r11 cmovcq %rax,%r12 movq 0(%rsi),%rax cmovcq %rbp,%r13 movq 8(%rsi),%rbp cmovcq %rcx,%r8 movq 16(%rsi),%rcx cmovcq %r10,%r9 movq 24(%rsi),%r10 call __ecp_nistz256_subx leaq 160(%rsp),%rbx leaq 224(%rsp),%rdi call __ecp_nistz256_sub_fromx movq 0+0(%rsp),%rax movq 0+8(%rsp),%rbp movq 0+16(%rsp),%rcx movq 0+24(%rsp),%r10 leaq 64(%rsp),%rdi call __ecp_nistz256_subx movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r8,16(%rdi) movq %r9,24(%rdi) movq 352(%rsp),%rdx leaq 352(%rsp),%rbx movq 0+160(%rsp),%r9 movq 8+160(%rsp),%r10 leaq -128+160(%rsp),%rsi movq 16+160(%rsp),%r11 movq 24+160(%rsp),%r12 leaq 32(%rsp),%rdi call __ecp_nistz256_mul_montx movq 96(%rsp),%rdx leaq 96(%rsp),%rbx movq 0+64(%rsp),%r9 movq 8+64(%rsp),%r10 leaq -128+64(%rsp),%rsi movq 16+64(%rsp),%r11 movq 24+64(%rsp),%r12 leaq 64(%rsp),%rdi call __ecp_nistz256_mul_montx leaq 32(%rsp),%rbx leaq 256(%rsp),%rdi call __ecp_nistz256_sub_fromx .byte 102,72,15,126,199 movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 288(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 288+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand .LONE_mont(%rip),%xmm2 pand .LONE_mont+16(%rip),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 384(%rsp),%xmm2 pand 384+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,64(%rdi) movdqu %xmm3,80(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 224(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 224+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 416(%rsp),%xmm2 pand 416+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 320(%rsp),%xmm2 pand 320+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,0(%rdi) movdqu %xmm3,16(%rdi) movdqa %xmm5,%xmm0 movdqa %xmm5,%xmm1 pandn 256(%rsp),%xmm0 movdqa %xmm5,%xmm2 pandn 256+16(%rsp),%xmm1 movdqa %xmm5,%xmm3 pand 448(%rsp),%xmm2 pand 448+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqa %xmm4,%xmm0 movdqa %xmm4,%xmm1 pandn %xmm2,%xmm0 movdqa %xmm4,%xmm2 pandn %xmm3,%xmm1 movdqa %xmm4,%xmm3 pand 352(%rsp),%xmm2 pand 352+16(%rsp),%xmm3 por %xmm0,%xmm2 por %xmm1,%xmm3 movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 480+56(%rsp),%rsi .cfi_def_cfa %rsi,8 movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbx .cfi_restore %rbx movq -8(%rsi),%rbp .cfi_restore %rbp leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Ladd_affinex_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ecp_nistz256_point_add_affinex,.-ecp_nistz256_point_add_affinex #endif #endif
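The routines above are the x86-64 P-256 (ecp_nistz256) point-arithmetic kernels: the plain paths plus the *x variants that use MULX/ADX and are selected at run time through OPENSSL_ia32cap_P. They are internal C-ABI functions operating on 256-bit field elements stored as four 64-bit little-endian limbs in Montgomery form. A minimal sketch of how a C caller inside the library would declare them follows; the struct names and layout are assumptions for illustration, not the crate's public API.

/* Hedged sketch: prototypes a C caller would use for the generated
 * P-256 point kernels above.  The limb layout (4 x 64-bit, little-endian,
 * Montgomery form) matches the 32-byte loads in the assembly; the struct
 * names here are illustrative assumptions, not a published header. */
#include <stdint.h>

typedef struct {
  uint64_t X[4], Y[4], Z[4];   /* Jacobian coordinates */
} P256_POINT;

typedef struct {
  uint64_t X[4], Y[4];         /* affine coordinates (Z == 1 implied) */
} P256_POINT_AFFINE;

void ecp_nistz256_point_double(P256_POINT *r, const P256_POINT *a);
void ecp_nistz256_point_add(P256_POINT *r, const P256_POINT *a,
                            const P256_POINT *b);
void ecp_nistz256_point_add_affine(P256_POINT *r, const P256_POINT *a,
                                   const P256_POINT_AFFINE *b);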
marvin-hansen/iggy-streaming-system
20,659
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/x86_64-mont.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .globl bn_mul_mont_nohw .hidden bn_mul_mont_nohw .type bn_mul_mont_nohw,@function .align 16 bn_mul_mont_nohw: .cfi_startproc _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 negq %r9 movq %rsp,%r11 leaq -16(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul_page_walk jmp .Lmul_page_walk_done .align 16 .Lmul_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul_page_walk .Lmul_page_walk_done: movq %rax,8(%rsp,%r9,8) .cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08 .Lmul_body: movq %rdx,%r12 movq (%r8),%r8 movq (%r12),%rbx movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%r13 leaq 1(%r15),%r15 jmp .L1st_enter .align 16 .L1st: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%r13 movq %r10,%r11 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 .L1st_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx leaq 1(%r15),%r15 movq %rdx,%r10 mulq %rbp cmpq %r9,%r15 jne .L1st addq %rax,%r13 movq (%rsi),%rax adcq $0,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 movq %r10,%r11 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 jmp .Louter .align 16 .Louter: movq (%r12,%r14,8),%rbx xorq %r15,%r15 movq %r8,%rbp movq (%rsp),%r10 mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq 8(%rsp),%r10 movq %rdx,%r13 leaq 1(%r15),%r15 jmp .Linner_enter .align 16 .Linner: addq %rax,%r13 movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 .Linner_enter: mulq %rbx addq %rax,%r11 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq %r11,%r10 movq %rdx,%r11 adcq $0,%r11 leaq 1(%r15),%r15 mulq %rbp cmpq %r9,%r15 jne .Linner addq %rax,%r13 movq (%rsi),%rax adcq $0,%rdx addq %r10,%r13 movq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %r13,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdx,%rdx addq %r11,%r13 adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r9,8) movq %rdx,(%rsp,%r9,8) leaq 1(%r14),%r14 cmpq %r9,%r14 jb .Louter xorq %r14,%r14 movq (%rsp),%rax movq %r9,%r15 .align 16 .Lsub: sbbq (%rcx,%r14,8),%rax movq %rax,(%rdi,%r14,8) movq 8(%rsp,%r14,8),%rax leaq 1(%r14),%r14 decq %r15 jnz .Lsub sbbq $0,%rax movq $-1,%rbx xorq %rax,%rbx xorq %r14,%r14 movq %r9,%r15 .Lcopy: movq (%rdi,%r14,8),%rcx movq (%rsp,%r14,8),%rdx andq %rbx,%rcx andq %rax,%rdx movq %r9,(%rsp,%r14,8) orq %rcx,%rdx movq %rdx,(%rdi,%r14,8) leaq 1(%r14),%r14 subq $1,%r15 jnz .Lcopy movq 8(%rsp,%r9,8),%rsi .cfi_def_cfa %rsi,8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq 
(%rsi),%rsp .cfi_def_cfa_register %rsp .Lmul_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mul_mont_nohw,.-bn_mul_mont_nohw .globl bn_mul4x_mont .hidden bn_mul4x_mont .type bn_mul4x_mont,@function .align 16 bn_mul4x_mont: .cfi_startproc _CET_ENDBR movl %r9d,%r9d movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 negq %r9 movq %rsp,%r11 leaq -32(%rsp,%r9,8),%r10 negq %r9 andq $-1024,%r10 subq %r10,%r11 andq $-4096,%r11 leaq (%r10,%r11,1),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul4x_page_walk jmp .Lmul4x_page_walk_done .Lmul4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r11 cmpq %r10,%rsp ja .Lmul4x_page_walk .Lmul4x_page_walk_done: movq %rax,8(%rsp,%r9,8) .cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08 .Lmul4x_body: movq %rdi,16(%rsp,%r9,8) movq %rdx,%r12 movq (%r8),%r8 movq (%r12),%rbx movq (%rsi),%rax xorq %r14,%r14 xorq %r15,%r15 movq %r8,%rbp mulq %rbx movq %rax,%r10 movq (%rcx),%rax imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi),%rax adcq $0,%rdx addq %r11,%rdi leaq 4(%r15),%r15 adcq $0,%rdx movq %rdi,(%rsp) movq %rdx,%r13 jmp .L1st4x .align 16 .L1st4x: mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq (%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx,%r15,8),%rax adcq $0,%rdx leaq 4(%r15),%r15 movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq -16(%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-32(%rsp,%r15,8) movq %rdx,%r13 cmpq %r9,%r15 jb .L1st4x mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi movq %r13,-8(%rsp,%r15,8) movq %rdi,(%rsp,%r15,8) leaq 1(%r14),%r14 .align 4 .Louter4x: movq (%r12,%r14,8),%rbx xorq %r15,%r15 movq (%rsp),%r10 movq %r8,%rbp mulq %rbx addq %rax,%r10 movq (%rcx),%rax adcq $0,%rdx imulq %r10,%rbp movq %rdx,%r11 mulq %rbp addq %rax,%r10 movq 8(%rsi),%rax adcq $0,%rdx movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx),%rax adcq $0,%rdx addq 8(%rsp),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq 16(%rsi),%rax adcq $0,%rdx addq %r11,%rdi leaq 4(%r15),%r15 adcq $0,%rdx movq %rdi,(%rsp) movq %rdx,%r13 jmp .Linner4x .align 16 .Linner4x: mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx addq -16(%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 
-8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx addq -8(%rsp,%r15,8),%r11 adcq $0,%rdx movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 mulq %rbx addq %rax,%r10 movq (%rcx,%r15,8),%rax adcq $0,%rdx addq (%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq 8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-8(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq 8(%rcx,%r15,8),%rax adcq $0,%rdx addq 8(%rsp,%r15,8),%r11 adcq $0,%rdx leaq 4(%r15),%r15 movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq -16(%rsi,%r15,8),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-32(%rsp,%r15,8) movq %rdx,%r13 cmpq %r9,%r15 jb .Linner4x mulq %rbx addq %rax,%r10 movq -16(%rcx,%r15,8),%rax adcq $0,%rdx addq -16(%rsp,%r15,8),%r10 adcq $0,%rdx movq %rdx,%r11 mulq %rbp addq %rax,%r13 movq -8(%rsi,%r15,8),%rax adcq $0,%rdx addq %r10,%r13 adcq $0,%rdx movq %r13,-24(%rsp,%r15,8) movq %rdx,%rdi mulq %rbx addq %rax,%r11 movq -8(%rcx,%r15,8),%rax adcq $0,%rdx addq -8(%rsp,%r15,8),%r11 adcq $0,%rdx leaq 1(%r14),%r14 movq %rdx,%r10 mulq %rbp addq %rax,%rdi movq (%rsi),%rax adcq $0,%rdx addq %r11,%rdi adcq $0,%rdx movq %rdi,-16(%rsp,%r15,8) movq %rdx,%r13 xorq %rdi,%rdi addq %r10,%r13 adcq $0,%rdi addq (%rsp,%r9,8),%r13 adcq $0,%rdi movq %r13,-8(%rsp,%r15,8) movq %rdi,(%rsp,%r15,8) cmpq %r9,%r14 jb .Louter4x movq 16(%rsp,%r9,8),%rdi leaq -4(%r9),%r15 movq 0(%rsp),%rax movq 8(%rsp),%rdx shrq $2,%r15 leaq (%rsp),%rsi xorq %r14,%r14 subq 0(%rcx),%rax movq 16(%rsi),%rbx movq 24(%rsi),%rbp sbbq 8(%rcx),%rdx .Lsub4x: movq %rax,0(%rdi,%r14,8) movq %rdx,8(%rdi,%r14,8) sbbq 16(%rcx,%r14,8),%rbx movq 32(%rsi,%r14,8),%rax movq 40(%rsi,%r14,8),%rdx sbbq 24(%rcx,%r14,8),%rbp movq %rbx,16(%rdi,%r14,8) movq %rbp,24(%rdi,%r14,8) sbbq 32(%rcx,%r14,8),%rax movq 48(%rsi,%r14,8),%rbx movq 56(%rsi,%r14,8),%rbp sbbq 40(%rcx,%r14,8),%rdx leaq 4(%r14),%r14 decq %r15 jnz .Lsub4x movq %rax,0(%rdi,%r14,8) movq 32(%rsi,%r14,8),%rax sbbq 16(%rcx,%r14,8),%rbx movq %rdx,8(%rdi,%r14,8) sbbq 24(%rcx,%r14,8),%rbp movq %rbx,16(%rdi,%r14,8) sbbq $0,%rax movq %rbp,24(%rdi,%r14,8) pxor %xmm0,%xmm0 .byte 102,72,15,110,224 pcmpeqd %xmm5,%xmm5 pshufd $0,%xmm4,%xmm4 movq %r9,%r15 pxor %xmm4,%xmm5 shrq $2,%r15 xorl %eax,%eax jmp .Lcopy4x .align 16 .Lcopy4x: movdqa (%rsp,%rax,1),%xmm1 movdqu (%rdi,%rax,1),%xmm2 pand %xmm4,%xmm1 pand %xmm5,%xmm2 movdqa 16(%rsp,%rax,1),%xmm3 movdqa %xmm0,(%rsp,%rax,1) por %xmm2,%xmm1 movdqu 16(%rdi,%rax,1),%xmm2 movdqu %xmm1,(%rdi,%rax,1) pand %xmm4,%xmm3 pand %xmm5,%xmm2 movdqa %xmm0,16(%rsp,%rax,1) por %xmm2,%xmm3 movdqu %xmm3,16(%rdi,%rax,1) leaq 32(%rax),%rax decq %r15 jnz .Lcopy4x movq 8(%rsp,%r9,8),%rsi .cfi_def_cfa %rsi, 8 movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lmul4x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mul4x_mont,.-bn_mul4x_mont #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .extern bn_sqrx8x_internal .hidden bn_sqrx8x_internal #endif .extern bn_sqr8x_internal .hidden bn_sqr8x_internal .globl bn_sqr8x_mont .hidden bn_sqr8x_mont .type bn_sqr8x_mont,@function .align 32 bn_sqr8x_mont: .cfi_startproc _CET_ENDBR movl 
%r9d,%r9d movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lsqr8x_prologue: movl %r9d,%r10d shll $3,%r9d shlq $3+2,%r10 negq %r9 leaq -64(%rsp,%r9,2),%r11 movq %rsp,%rbp movq (%r8),%r8 subq %rsi,%r11 andq $4095,%r11 cmpq %r11,%r10 jb .Lsqr8x_sp_alt subq %r11,%rbp leaq -64(%rbp,%r9,2),%rbp jmp .Lsqr8x_sp_done .align 32 .Lsqr8x_sp_alt: leaq 4096-64(,%r9,2),%r10 leaq -64(%rbp,%r9,2),%rbp subq %r10,%r11 movq $0,%r10 cmovcq %r10,%r11 subq %r11,%rbp .Lsqr8x_sp_done: andq $-64,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lsqr8x_page_walk jmp .Lsqr8x_page_walk_done .align 16 .Lsqr8x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lsqr8x_page_walk .Lsqr8x_page_walk_done: movq %r9,%r10 negq %r9 movq %r8,32(%rsp) movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 .Lsqr8x_body: .byte 102,72,15,110,209 pxor %xmm0,%xmm0 .byte 102,72,15,110,207 .byte 102,73,15,110,218 #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX testq %rdx,%rdx jz .Lsqr8x_nox call bn_sqrx8x_internal leaq (%r8,%rcx,1),%rbx movq %rcx,%r9 movq %rcx,%rdx .byte 102,72,15,126,207 sarq $3+2,%rcx jmp .Lsqr8x_sub .align 32 .Lsqr8x_nox: #endif call bn_sqr8x_internal leaq (%rdi,%r9,1),%rbx movq %r9,%rcx movq %r9,%rdx .byte 102,72,15,126,207 sarq $3+2,%rcx jmp .Lsqr8x_sub .align 32 .Lsqr8x_sub: movq 0(%rbx),%r12 movq 8(%rbx),%r13 movq 16(%rbx),%r14 movq 24(%rbx),%r15 leaq 32(%rbx),%rbx sbbq 0(%rbp),%r12 sbbq 8(%rbp),%r13 sbbq 16(%rbp),%r14 sbbq 24(%rbp),%r15 leaq 32(%rbp),%rbp movq %r12,0(%rdi) movq %r13,8(%rdi) movq %r14,16(%rdi) movq %r15,24(%rdi) leaq 32(%rdi),%rdi incq %rcx jnz .Lsqr8x_sub sbbq $0,%rax leaq (%rbx,%r9,1),%rbx leaq (%rdi,%r9,1),%rdi .byte 102,72,15,110,200 pxor %xmm0,%xmm0 pshufd $0,%xmm1,%xmm1 movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 jmp .Lsqr8x_cond_copy .align 32 .Lsqr8x_cond_copy: movdqa 0(%rbx),%xmm2 movdqa 16(%rbx),%xmm3 leaq 32(%rbx),%rbx movdqu 0(%rdi),%xmm4 movdqu 16(%rdi),%xmm5 leaq 32(%rdi),%rdi movdqa %xmm0,-32(%rbx) movdqa %xmm0,-16(%rbx) movdqa %xmm0,-32(%rbx,%rdx,1) movdqa %xmm0,-16(%rbx,%rdx,1) pcmpeqd %xmm1,%xmm0 pand %xmm1,%xmm2 pand %xmm1,%xmm3 pand %xmm0,%xmm4 pand %xmm0,%xmm5 pxor %xmm0,%xmm0 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqu %xmm4,-32(%rdi) movdqu %xmm5,-16(%rdi) addq $32,%r9 jnz .Lsqr8x_cond_copy movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lsqr8x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_sqr8x_mont,.-bn_sqr8x_mont #ifndef MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX .globl bn_mulx4x_mont .hidden bn_mulx4x_mont .type bn_mulx4x_mont,@function .align 32 bn_mulx4x_mont: .cfi_startproc _CET_ENDBR movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 .Lmulx4x_prologue: shll $3,%r9d xorq %r10,%r10 subq %r9,%r10 movq (%r8),%r8 leaq -72(%rsp,%r10,1),%rbp andq $-128,%rbp movq %rsp,%r11 subq %rbp,%r11 andq $-4096,%r11 leaq (%r11,%rbp,1),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmulx4x_page_walk jmp .Lmulx4x_page_walk_done 
.align 16 .Lmulx4x_page_walk: leaq -4096(%rsp),%rsp movq (%rsp),%r10 cmpq %rbp,%rsp ja .Lmulx4x_page_walk .Lmulx4x_page_walk_done: leaq (%rdx,%r9,1),%r10 movq %r9,0(%rsp) shrq $5,%r9 movq %r10,16(%rsp) subq $1,%r9 movq %r8,24(%rsp) movq %rdi,32(%rsp) movq %rax,40(%rsp) .cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08 movq %r9,48(%rsp) jmp .Lmulx4x_body .align 32 .Lmulx4x_body: leaq 8(%rdx),%rdi movq (%rdx),%rdx leaq 64+32(%rsp),%rbx movq %rdx,%r9 mulxq 0(%rsi),%r8,%rax mulxq 8(%rsi),%r11,%r14 addq %rax,%r11 movq %rdi,8(%rsp) mulxq 16(%rsi),%r12,%r13 adcq %r14,%r12 adcq $0,%r13 movq %r8,%rdi imulq 24(%rsp),%r8 xorq %rbp,%rbp mulxq 24(%rsi),%rax,%r14 movq %r8,%rdx leaq 32(%rsi),%rsi adcxq %rax,%r13 adcxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%rdi adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 .byte 0xc4,0x62,0xfb,0xf6,0xa1,0x10,0x00,0x00,0x00 movq 48(%rsp),%rdi movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) adcxq %rax,%r12 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r12,-16(%rbx) jmp .Lmulx4x_1st .align 32 .Lmulx4x_1st: adcxq %rbp,%r15 mulxq 0(%rsi),%r10,%rax adcxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 .byte 0x67,0x67 movq %r8,%rdx adcxq %rax,%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 movq %r11,-32(%rbx) adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz .Lmulx4x_1st movq 0(%rsp),%rax movq 8(%rsp),%rdi adcq %rbp,%r15 addq %r15,%r14 sbbq %r15,%r15 movq %r14,-8(%rbx) jmp .Lmulx4x_outer .align 32 .Lmulx4x_outer: movq (%rdi),%rdx leaq 8(%rdi),%rdi subq %rax,%rsi movq %r15,(%rbx) leaq 64+32(%rsp),%rbx subq %rax,%rcx mulxq 0(%rsi),%r8,%r11 xorl %ebp,%ebp movq %rdx,%r9 mulxq 8(%rsi),%r14,%r12 adoxq -32(%rbx),%r8 adcxq %r14,%r11 mulxq 16(%rsi),%r15,%r13 adoxq -24(%rbx),%r11 adcxq %r15,%r12 adoxq -16(%rbx),%r12 adcxq %rbp,%r13 adoxq %rbp,%r13 movq %rdi,8(%rsp) movq %r8,%r15 imulq 24(%rsp),%r8 xorl %ebp,%ebp mulxq 24(%rsi),%rax,%r14 movq %r8,%rdx adcxq %rax,%r13 adoxq -8(%rbx),%r13 adcxq %rbp,%r14 leaq 32(%rsi),%rsi adoxq %rbp,%r14 mulxq 0(%rcx),%rax,%r10 adcxq %rax,%r15 adoxq %r11,%r10 mulxq 8(%rcx),%rax,%r11 adcxq %rax,%r10 adoxq %r12,%r11 mulxq 16(%rcx),%rax,%r12 movq %r10,-32(%rbx) adcxq %rax,%r11 adoxq %r13,%r12 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-24(%rbx) leaq 32(%rcx),%rcx adcxq %rax,%r12 adoxq %rbp,%r15 movq 48(%rsp),%rdi movq %r12,-16(%rbx) jmp .Lmulx4x_inner .align 32 .Lmulx4x_inner: mulxq 0(%rsi),%r10,%rax adcxq %rbp,%r15 adoxq %r14,%r10 mulxq 8(%rsi),%r11,%r14 adcxq 0(%rbx),%r10 adoxq %rax,%r11 mulxq 16(%rsi),%r12,%rax adcxq 8(%rbx),%r11 adoxq %r14,%r12 mulxq 24(%rsi),%r13,%r14 movq %r8,%rdx adcxq 16(%rbx),%r12 adoxq %rax,%r13 adcxq 24(%rbx),%r13 adoxq %rbp,%r14 leaq 32(%rsi),%rsi leaq 32(%rbx),%rbx adcxq %rbp,%r14 adoxq %r15,%r10 mulxq 0(%rcx),%rax,%r15 adcxq %rax,%r10 adoxq %r15,%r11 mulxq 8(%rcx),%rax,%r15 adcxq %rax,%r11 adoxq %r15,%r12 mulxq 16(%rcx),%rax,%r15 movq %r10,-40(%rbx) adcxq %rax,%r12 adoxq %r15,%r13 mulxq 24(%rcx),%rax,%r15 movq %r9,%rdx movq %r11,-32(%rbx) movq %r12,-24(%rbx) adcxq %rax,%r13 adoxq %rbp,%r15 leaq 32(%rcx),%rcx movq %r13,-16(%rbx) decq %rdi jnz .Lmulx4x_inner movq 
0(%rsp),%rax movq 8(%rsp),%rdi adcq %rbp,%r15 subq 0(%rbx),%rbp adcq %r15,%r14 sbbq %r15,%r15 movq %r14,-8(%rbx) cmpq 16(%rsp),%rdi jne .Lmulx4x_outer leaq 64(%rsp),%rbx subq %rax,%rcx negq %r15 movq %rax,%rdx shrq $3+2,%rax movq 32(%rsp),%rdi jmp .Lmulx4x_sub .align 32 .Lmulx4x_sub: movq 0(%rbx),%r11 movq 8(%rbx),%r12 movq 16(%rbx),%r13 movq 24(%rbx),%r14 leaq 32(%rbx),%rbx sbbq 0(%rcx),%r11 sbbq 8(%rcx),%r12 sbbq 16(%rcx),%r13 sbbq 24(%rcx),%r14 leaq 32(%rcx),%rcx movq %r11,0(%rdi) movq %r12,8(%rdi) movq %r13,16(%rdi) movq %r14,24(%rdi) leaq 32(%rdi),%rdi decq %rax jnz .Lmulx4x_sub sbbq $0,%r15 leaq 64(%rsp),%rbx subq %rdx,%rdi .byte 102,73,15,110,207 pxor %xmm0,%xmm0 pshufd $0,%xmm1,%xmm1 movq 40(%rsp),%rsi .cfi_def_cfa %rsi,8 jmp .Lmulx4x_cond_copy .align 32 .Lmulx4x_cond_copy: movdqa 0(%rbx),%xmm2 movdqa 16(%rbx),%xmm3 leaq 32(%rbx),%rbx movdqu 0(%rdi),%xmm4 movdqu 16(%rdi),%xmm5 leaq 32(%rdi),%rdi movdqa %xmm0,-32(%rbx) movdqa %xmm0,-16(%rbx) pcmpeqd %xmm1,%xmm0 pand %xmm1,%xmm2 pand %xmm1,%xmm3 pand %xmm0,%xmm4 pand %xmm0,%xmm5 pxor %xmm0,%xmm0 por %xmm2,%xmm4 por %xmm3,%xmm5 movdqu %xmm4,-32(%rdi) movdqu %xmm5,-16(%rdi) subq $32,%rdx jnz .Lmulx4x_cond_copy movq %rdx,(%rbx) movq $1,%rax movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lmulx4x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size bn_mulx4x_mont,.-bn_mulx4x_mont #endif .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 16 #endif
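The x86_64-mont.S listing above supplies the Montgomery-multiplication back ends: bn_mul_mont_nohw, the 4-way unrolled bn_mul4x_mont, the squaring path bn_sqr8x_mont, and the MULX/ADX path bn_mulx4x_mont. Each computes r = a*b*R^-1 mod n over num 64-bit limbs, where R = 2^(64*num) and n0 holds the precomputed -n^-1 mod 2^64 word used during reduction. A rough, hedged sketch of the calling convention follows; the exact prototypes live in the library's internal headers, and the return type shown is an assumption.

/* Hedged sketch of the Montgomery entry points generated above.
 * rp, ap, bp, np are little-endian arrays of `num` 64-bit limbs and
 * n0[0] = -n^-1 mod 2^64; the result is fully reduced below np.
 * The exact return type and argument declarations are assumptions. */
#include <stddef.h>
#include <stdint.h>

typedef uint64_t BN_ULONG;

int bn_mul_mont_nohw(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
                     const BN_ULONG *np, const BN_ULONG *n0, size_t num);
/* On return: rp == ap * bp * R^-1 (mod np), with R = 2^(64*num). */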
marvin-hansen/iggy-streaming-system
65,579
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/fipsmodule/aesni-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P .globl aes_hw_encrypt .hidden aes_hw_encrypt .type aes_hw_encrypt,@function .align 16 aes_hw_encrypt: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST .extern BORINGSSL_function_hit .hidden BORINGSSL_function_hit movb $1,BORINGSSL_function_hit+1(%rip) #endif movups (%rdi),%xmm2 movl 240(%rdx),%eax movups (%rdx),%xmm0 movups 16(%rdx),%xmm1 leaq 32(%rdx),%rdx xorps %xmm0,%xmm2 .Loop_enc1_1: .byte 102,15,56,220,209 decl %eax movups (%rdx),%xmm1 leaq 16(%rdx),%rdx jnz .Loop_enc1_1 .byte 102,15,56,221,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_encrypt,.-aes_hw_encrypt .globl aes_hw_decrypt .hidden aes_hw_decrypt .type aes_hw_decrypt,@function .align 16 aes_hw_decrypt: .cfi_startproc _CET_ENDBR movups (%rdi),%xmm2 movl 240(%rdx),%eax movups (%rdx),%xmm0 movups 16(%rdx),%xmm1 leaq 32(%rdx),%rdx xorps %xmm0,%xmm2 .Loop_dec1_2: .byte 102,15,56,222,209 decl %eax movups (%rdx),%xmm1 leaq 16(%rdx),%rdx jnz .Loop_dec1_2 .byte 102,15,56,223,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_decrypt, .-aes_hw_decrypt .type _aesni_encrypt2,@function .align 16 _aesni_encrypt2: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax .Lenc_loop2: .byte 102,15,56,220,209 .byte 102,15,56,220,217 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 movups -16(%rcx,%rax,1),%xmm0 jnz .Lenc_loop2 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_encrypt2,.-_aesni_encrypt2 .type _aesni_decrypt2,@function .align 16 _aesni_decrypt2: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax .Ldec_loop2: .byte 102,15,56,222,209 .byte 102,15,56,222,217 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 movups -16(%rcx,%rax,1),%xmm0 jnz .Ldec_loop2 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_decrypt2,.-_aesni_decrypt2 .type _aesni_encrypt3,@function .align 16 _aesni_encrypt3: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax .Lenc_loop3: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 movups -16(%rcx,%rax,1),%xmm0 jnz .Lenc_loop3 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_encrypt3,.-_aesni_encrypt3 .type _aesni_decrypt3,@function .align 16 _aesni_decrypt3: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 movups 
32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax addq $16,%rax .Ldec_loop3: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 movups -16(%rcx,%rax,1),%xmm0 jnz .Ldec_loop3 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_decrypt3,.-_aesni_decrypt3 .type _aesni_encrypt4,@function .align 16 _aesni_encrypt4: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 xorps %xmm0,%xmm5 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 0x0f,0x1f,0x00 addq $16,%rax .Lenc_loop4: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movups -16(%rcx,%rax,1),%xmm0 jnz .Lenc_loop4 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_encrypt4,.-_aesni_encrypt4 .type _aesni_decrypt4,@function .align 16 _aesni_decrypt4: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 xorps %xmm0,%xmm4 xorps %xmm0,%xmm5 movups 32(%rcx),%xmm0 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 0x0f,0x1f,0x00 addq $16,%rax .Ldec_loop4: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 movups -16(%rcx,%rax,1),%xmm0 jnz .Ldec_loop4 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_decrypt4,.-_aesni_decrypt4 .type _aesni_encrypt6,@function .align 16 _aesni_encrypt6: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 pxor %xmm0,%xmm3 pxor %xmm0,%xmm4 .byte 102,15,56,220,209 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,217 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 .byte 102,15,56,220,225 pxor %xmm0,%xmm7 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp .Lenc_loop6_enter .align 16 .Lenc_loop6: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .Lenc_loop6_enter: .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups -16(%rcx,%rax,1),%xmm0 jnz .Lenc_loop6 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 102,15,56,221,240 .byte 102,15,56,221,248 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_encrypt6,.-_aesni_encrypt6 .type _aesni_decrypt6,@function .align 16 _aesni_decrypt6: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 pxor 
%xmm0,%xmm3 pxor %xmm0,%xmm4 .byte 102,15,56,222,209 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,222,217 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 .byte 102,15,56,222,225 pxor %xmm0,%xmm7 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp .Ldec_loop6_enter .align 16 .Ldec_loop6: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .Ldec_loop6_enter: .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups -16(%rcx,%rax,1),%xmm0 jnz .Ldec_loop6 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 102,15,56,223,240 .byte 102,15,56,223,248 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_decrypt6,.-_aesni_decrypt6 .type _aesni_encrypt8,@function .align 16 _aesni_encrypt8: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 pxor %xmm0,%xmm4 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,209 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,220,217 pxor %xmm0,%xmm9 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp .Lenc_loop8_inner .align 16 .Lenc_loop8: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .Lenc_loop8_inner: .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 .Lenc_loop8_enter: movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups -16(%rcx,%rax,1),%xmm0 jnz .Lenc_loop8 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 .byte 102,15,56,221,208 .byte 102,15,56,221,216 .byte 102,15,56,221,224 .byte 102,15,56,221,232 .byte 102,15,56,221,240 .byte 102,15,56,221,248 .byte 102,68,15,56,221,192 .byte 102,68,15,56,221,200 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_encrypt8,.-_aesni_encrypt8 .type _aesni_decrypt8,@function .align 16 _aesni_decrypt8: .cfi_startproc movups (%rcx),%xmm0 shll $4,%eax movups 16(%rcx),%xmm1 xorps %xmm0,%xmm2 xorps %xmm0,%xmm3 pxor %xmm0,%xmm4 pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 leaq 32(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,222,209 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,222,217 pxor %xmm0,%xmm9 movups (%rcx,%rax,1),%xmm0 addq $16,%rax jmp .Ldec_loop8_inner .align 16 .Ldec_loop8: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .Ldec_loop8_inner: .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 .Ldec_loop8_enter: movups (%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups -16(%rcx,%rax,1),%xmm0 jnz .Ldec_loop8 .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 
.byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 .byte 102,15,56,223,208 .byte 102,15,56,223,216 .byte 102,15,56,223,224 .byte 102,15,56,223,232 .byte 102,15,56,223,240 .byte 102,15,56,223,248 .byte 102,68,15,56,223,192 .byte 102,68,15,56,223,200 .byte 0xf3,0xc3 .cfi_endproc .size _aesni_decrypt8,.-_aesni_decrypt8 .globl aes_hw_ecb_encrypt .hidden aes_hw_ecb_encrypt .type aes_hw_ecb_encrypt,@function .align 16 aes_hw_ecb_encrypt: .cfi_startproc _CET_ENDBR andq $-16,%rdx jz .Lecb_ret movl 240(%rcx),%eax movups (%rcx),%xmm0 movq %rcx,%r11 movl %eax,%r10d testl %r8d,%r8d jz .Lecb_decrypt cmpq $0x80,%rdx jb .Lecb_enc_tail movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 movdqu 48(%rdi),%xmm5 movdqu 64(%rdi),%xmm6 movdqu 80(%rdi),%xmm7 movdqu 96(%rdi),%xmm8 movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi subq $0x80,%rdx jmp .Lecb_enc_loop8_enter .align 16 .Lecb_enc_loop8: movups %xmm2,(%rsi) movq %r11,%rcx movdqu (%rdi),%xmm2 movl %r10d,%eax movups %xmm3,16(%rsi) movdqu 16(%rdi),%xmm3 movups %xmm4,32(%rsi) movdqu 32(%rdi),%xmm4 movups %xmm5,48(%rsi) movdqu 48(%rdi),%xmm5 movups %xmm6,64(%rsi) movdqu 64(%rdi),%xmm6 movups %xmm7,80(%rsi) movdqu 80(%rdi),%xmm7 movups %xmm8,96(%rsi) movdqu 96(%rdi),%xmm8 movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi .Lecb_enc_loop8_enter: call _aesni_encrypt8 subq $0x80,%rdx jnc .Lecb_enc_loop8 movups %xmm2,(%rsi) movq %r11,%rcx movups %xmm3,16(%rsi) movl %r10d,%eax movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) movups %xmm8,96(%rsi) movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi addq $0x80,%rdx jz .Lecb_ret .Lecb_enc_tail: movups (%rdi),%xmm2 cmpq $0x20,%rdx jb .Lecb_enc_one movups 16(%rdi),%xmm3 je .Lecb_enc_two movups 32(%rdi),%xmm4 cmpq $0x40,%rdx jb .Lecb_enc_three movups 48(%rdi),%xmm5 je .Lecb_enc_four movups 64(%rdi),%xmm6 cmpq $0x60,%rdx jb .Lecb_enc_five movups 80(%rdi),%xmm7 je .Lecb_enc_six movdqu 96(%rdi),%xmm8 xorps %xmm9,%xmm9 call _aesni_encrypt8 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) movups %xmm8,96(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_one: movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_3: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_3 .byte 102,15,56,221,209 movups %xmm2,(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_two: call _aesni_encrypt2 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_three: call _aesni_encrypt3 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_four: call _aesni_encrypt4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_five: xorps %xmm7,%xmm7 call _aesni_encrypt6 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) jmp .Lecb_ret .align 16 .Lecb_enc_six: call _aesni_encrypt6 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) movups %xmm5,48(%rsi) movups %xmm6,64(%rsi) movups %xmm7,80(%rsi) jmp .Lecb_ret .align 16 .Lecb_decrypt: cmpq $0x80,%rdx jb .Lecb_dec_tail movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 movdqu 48(%rdi),%xmm5 movdqu 64(%rdi),%xmm6 movdqu 80(%rdi),%xmm7 movdqu 96(%rdi),%xmm8 movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi subq $0x80,%rdx jmp .Lecb_dec_loop8_enter .align 16 .Lecb_dec_loop8: movups %xmm2,(%rsi) movq %r11,%rcx 
movdqu (%rdi),%xmm2 movl %r10d,%eax movups %xmm3,16(%rsi) movdqu 16(%rdi),%xmm3 movups %xmm4,32(%rsi) movdqu 32(%rdi),%xmm4 movups %xmm5,48(%rsi) movdqu 48(%rdi),%xmm5 movups %xmm6,64(%rsi) movdqu 64(%rdi),%xmm6 movups %xmm7,80(%rsi) movdqu 80(%rdi),%xmm7 movups %xmm8,96(%rsi) movdqu 96(%rdi),%xmm8 movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi movdqu 112(%rdi),%xmm9 leaq 128(%rdi),%rdi .Lecb_dec_loop8_enter: call _aesni_decrypt8 movups (%r11),%xmm0 subq $0x80,%rdx jnc .Lecb_dec_loop8 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movq %r11,%rcx movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movl %r10d,%eax movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 movups %xmm8,96(%rsi) pxor %xmm8,%xmm8 movups %xmm9,112(%rsi) pxor %xmm9,%xmm9 leaq 128(%rsi),%rsi addq $0x80,%rdx jz .Lecb_ret .Lecb_dec_tail: movups (%rdi),%xmm2 cmpq $0x20,%rdx jb .Lecb_dec_one movups 16(%rdi),%xmm3 je .Lecb_dec_two movups 32(%rdi),%xmm4 cmpq $0x40,%rdx jb .Lecb_dec_three movups 48(%rdi),%xmm5 je .Lecb_dec_four movups 64(%rdi),%xmm6 cmpq $0x60,%rdx jb .Lecb_dec_five movups 80(%rdi),%xmm7 je .Lecb_dec_six movups 96(%rdi),%xmm8 movups (%rcx),%xmm0 xorps %xmm9,%xmm9 call _aesni_decrypt8 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 movups %xmm8,96(%rsi) pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 jmp .Lecb_ret .align 16 .Lecb_dec_one: movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_4: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_4 .byte 102,15,56,223,209 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp .Lecb_ret .align 16 .Lecb_dec_two: call _aesni_decrypt2 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 jmp .Lecb_ret .align 16 .Lecb_dec_three: call _aesni_decrypt3 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 jmp .Lecb_ret .align 16 .Lecb_dec_four: call _aesni_decrypt4 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 jmp .Lecb_ret .align 16 .Lecb_dec_five: xorps %xmm7,%xmm7 call _aesni_decrypt6 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 jmp .Lecb_ret .align 16 .Lecb_dec_six: call _aesni_decrypt6 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 movups %xmm3,16(%rsi) pxor %xmm3,%xmm3 movups %xmm4,32(%rsi) pxor %xmm4,%xmm4 movups %xmm5,48(%rsi) pxor %xmm5,%xmm5 movups %xmm6,64(%rsi) pxor %xmm6,%xmm6 movups %xmm7,80(%rsi) pxor %xmm7,%xmm7 .Lecb_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_ecb_encrypt,.-aes_hw_ecb_encrypt .globl aes_hw_ctr32_encrypt_blocks .hidden aes_hw_ctr32_encrypt_blocks .type aes_hw_ctr32_encrypt_blocks,@function .align 16 aes_hw_ctr32_encrypt_blocks: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,BORINGSSL_function_hit(%rip) #endif cmpq $1,%rdx jb .Lctr32_epilogue jne .Lctr32_bulk movups (%r8),%xmm2 movups (%rdi),%xmm3 movl 240(%rcx),%edx movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_5: .byte 102,15,56,220,209 decl %edx 
movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_5 .byte 102,15,56,221,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 xorps %xmm3,%xmm2 pxor %xmm3,%xmm3 movups %xmm2,(%rsi) xorps %xmm2,%xmm2 jmp .Lctr32_epilogue .align 16 .Lctr32_bulk: leaq (%rsp),%r11 .cfi_def_cfa_register %r11 pushq %rbp .cfi_offset %rbp,-16 subq $128,%rsp andq $-16,%rsp movdqu (%r8),%xmm2 movdqu (%rcx),%xmm0 movl 12(%r8),%r8d pxor %xmm0,%xmm2 movl 12(%rcx),%ebp movdqa %xmm2,0(%rsp) bswapl %r8d movdqa %xmm2,%xmm3 movdqa %xmm2,%xmm4 movdqa %xmm2,%xmm5 movdqa %xmm2,64(%rsp) movdqa %xmm2,80(%rsp) movdqa %xmm2,96(%rsp) movq %rdx,%r10 movdqa %xmm2,112(%rsp) leaq 1(%r8),%rax leaq 2(%r8),%rdx bswapl %eax bswapl %edx xorl %ebp,%eax xorl %ebp,%edx .byte 102,15,58,34,216,3 leaq 3(%r8),%rax movdqa %xmm3,16(%rsp) .byte 102,15,58,34,226,3 bswapl %eax movq %r10,%rdx leaq 4(%r8),%r10 movdqa %xmm4,32(%rsp) xorl %ebp,%eax bswapl %r10d .byte 102,15,58,34,232,3 xorl %ebp,%r10d movdqa %xmm5,48(%rsp) leaq 5(%r8),%r9 movl %r10d,64+12(%rsp) bswapl %r9d leaq 6(%r8),%r10 movl 240(%rcx),%eax xorl %ebp,%r9d bswapl %r10d movl %r9d,80+12(%rsp) xorl %ebp,%r10d leaq 7(%r8),%r9 movl %r10d,96+12(%rsp) bswapl %r9d xorl %ebp,%r9d movl %r9d,112+12(%rsp) movups 16(%rcx),%xmm1 movdqa 64(%rsp),%xmm6 movdqa 80(%rsp),%xmm7 cmpq $8,%rdx jb .Lctr32_tail leaq 128(%rcx),%rcx subq $8,%rdx jmp .Lctr32_loop8 .align 32 .Lctr32_loop8: addl $8,%r8d movdqa 96(%rsp),%xmm8 .byte 102,15,56,220,209 movl %r8d,%r9d movdqa 112(%rsp),%xmm9 .byte 102,15,56,220,217 bswapl %r9d movups 32-128(%rcx),%xmm0 .byte 102,15,56,220,225 xorl %ebp,%r9d nop .byte 102,15,56,220,233 movl %r9d,0+12(%rsp) leaq 1(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 48-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,16+12(%rsp) leaq 2(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 64-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,32+12(%rsp) leaq 3(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 80-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,48+12(%rsp) leaq 4(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 96-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,64+12(%rsp) leaq 5(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 112-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,224 .byte 102,15,56,220,232 movl %r9d,80+12(%rsp) leaq 6(%r8),%r9 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 128-128(%rcx),%xmm0 bswapl %r9d .byte 102,15,56,220,209 .byte 102,15,56,220,217 xorl %ebp,%r9d .byte 0x66,0x90 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movl %r9d,96+12(%rsp) leaq 7(%r8),%r9 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 
102,68,15,56,220,201 movups 144-128(%rcx),%xmm1 bswapl %r9d .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 xorl %ebp,%r9d movdqu 0(%rdi),%xmm10 .byte 102,15,56,220,232 movl %r9d,112+12(%rsp) cmpl $11,%eax .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 160-128(%rcx),%xmm0 jb .Lctr32_enc_done .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 176-128(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 192-128(%rcx),%xmm0 je .Lctr32_enc_done .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movups 208-128(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 .byte 102,68,15,56,220,192 .byte 102,68,15,56,220,200 movups 224-128(%rcx),%xmm0 jmp .Lctr32_enc_done .align 16 .Lctr32_enc_done: movdqu 16(%rdi),%xmm11 pxor %xmm0,%xmm10 movdqu 32(%rdi),%xmm12 pxor %xmm0,%xmm11 movdqu 48(%rdi),%xmm13 pxor %xmm0,%xmm12 movdqu 64(%rdi),%xmm14 pxor %xmm0,%xmm13 movdqu 80(%rdi),%xmm15 pxor %xmm0,%xmm14 prefetcht0 448(%rdi) prefetcht0 512(%rdi) pxor %xmm0,%xmm15 .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 .byte 102,68,15,56,220,201 movdqu 96(%rdi),%xmm1 leaq 128(%rdi),%rdi .byte 102,65,15,56,221,210 pxor %xmm0,%xmm1 movdqu 112-128(%rdi),%xmm10 .byte 102,65,15,56,221,219 pxor %xmm0,%xmm10 movdqa 0(%rsp),%xmm11 .byte 102,65,15,56,221,228 .byte 102,65,15,56,221,237 movdqa 16(%rsp),%xmm12 movdqa 32(%rsp),%xmm13 .byte 102,65,15,56,221,246 .byte 102,65,15,56,221,255 movdqa 48(%rsp),%xmm14 movdqa 64(%rsp),%xmm15 .byte 102,68,15,56,221,193 movdqa 80(%rsp),%xmm0 movups 16-128(%rcx),%xmm1 .byte 102,69,15,56,221,202 movups %xmm2,(%rsi) movdqa %xmm11,%xmm2 movups %xmm3,16(%rsi) movdqa %xmm12,%xmm3 movups %xmm4,32(%rsi) movdqa %xmm13,%xmm4 movups %xmm5,48(%rsi) movdqa %xmm14,%xmm5 movups %xmm6,64(%rsi) movdqa %xmm15,%xmm6 movups %xmm7,80(%rsi) movdqa %xmm0,%xmm7 movups %xmm8,96(%rsi) movups %xmm9,112(%rsi) leaq 128(%rsi),%rsi subq $8,%rdx jnc .Lctr32_loop8 addq $8,%rdx jz .Lctr32_done leaq -128(%rcx),%rcx .Lctr32_tail: leaq 16(%rcx),%rcx cmpq $4,%rdx jb .Lctr32_loop3 je .Lctr32_loop4 shll $4,%eax movdqa 96(%rsp),%xmm8 pxor %xmm9,%xmm9 movups 16(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 102,15,56,220,217 leaq 32-16(%rcx,%rax,1),%rcx negq %rax .byte 102,15,56,220,225 addq $16,%rax movups (%rdi),%xmm10 .byte 102,15,56,220,233 .byte 102,15,56,220,241 movups 16(%rdi),%xmm11 movups 32(%rdi),%xmm12 .byte 102,15,56,220,249 .byte 102,68,15,56,220,193 call .Lenc_loop8_enter movdqu 48(%rdi),%xmm13 pxor %xmm10,%xmm2 movdqu 64(%rdi),%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm10,%xmm6 movdqu %xmm5,48(%rsi) movdqu %xmm6,64(%rsi) cmpq $6,%rdx jb .Lctr32_done movups 80(%rdi),%xmm11 xorps %xmm11,%xmm7 movups %xmm7,80(%rsi) je .Lctr32_done movups 96(%rdi),%xmm12 xorps %xmm12,%xmm8 
movups %xmm8,96(%rsi) jmp .Lctr32_done .align 32 .Lctr32_loop4: .byte 102,15,56,220,209 leaq 16(%rcx),%rcx decl %eax .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movups (%rcx),%xmm1 jnz .Lctr32_loop4 .byte 102,15,56,221,209 .byte 102,15,56,221,217 movups (%rdi),%xmm10 movups 16(%rdi),%xmm11 .byte 102,15,56,221,225 .byte 102,15,56,221,233 movups 32(%rdi),%xmm12 movups 48(%rdi),%xmm13 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) xorps %xmm11,%xmm3 movups %xmm3,16(%rsi) pxor %xmm12,%xmm4 movdqu %xmm4,32(%rsi) pxor %xmm13,%xmm5 movdqu %xmm5,48(%rsi) jmp .Lctr32_done .align 32 .Lctr32_loop3: .byte 102,15,56,220,209 leaq 16(%rcx),%rcx decl %eax .byte 102,15,56,220,217 .byte 102,15,56,220,225 movups (%rcx),%xmm1 jnz .Lctr32_loop3 .byte 102,15,56,221,209 .byte 102,15,56,221,217 .byte 102,15,56,221,225 movups (%rdi),%xmm10 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) cmpq $2,%rdx jb .Lctr32_done movups 16(%rdi),%xmm11 xorps %xmm11,%xmm3 movups %xmm3,16(%rsi) je .Lctr32_done movups 32(%rdi),%xmm12 xorps %xmm12,%xmm4 movups %xmm4,32(%rsi) .Lctr32_done: xorps %xmm0,%xmm0 xorl %ebp,%ebp pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps %xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 movaps %xmm0,112(%rsp) pxor %xmm15,%xmm15 movq -8(%r11),%rbp .cfi_restore %rbp leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lctr32_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_ctr32_encrypt_blocks,.-aes_hw_ctr32_encrypt_blocks .globl aes_hw_xts_encrypt .hidden aes_hw_xts_encrypt .type aes_hw_xts_encrypt,@function .align 16 aes_hw_xts_encrypt: .cfi_startproc _CET_ENDBR leaq (%rsp),%r11 .cfi_def_cfa_register %r11 pushq %rbp .cfi_offset %rbp,-16 subq $112,%rsp andq $-16,%rsp movups (%r9),%xmm2 movl 240(%r8),%eax movl 240(%rcx),%r10d movups (%r8),%xmm0 movups 16(%r8),%xmm1 leaq 32(%r8),%r8 xorps %xmm0,%xmm2 .Loop_enc1_6: .byte 102,15,56,220,209 decl %eax movups (%r8),%xmm1 leaq 16(%r8),%r8 jnz .Loop_enc1_6 .byte 102,15,56,221,209 movups (%rcx),%xmm0 movq %rcx,%rbp movl %r10d,%eax shll $4,%r10d movq %rdx,%r9 andq $-16,%rdx movups 16(%rcx,%r10,1),%xmm1 movdqa .Lxts_magic(%rip),%xmm8 movdqa %xmm2,%xmm15 pshufd $0x5f,%xmm2,%xmm9 pxor %xmm0,%xmm1 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm10 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm10 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm11 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm11 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm12 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm12 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm13 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm13 pxor %xmm14,%xmm15 movdqa %xmm15,%xmm14 psrad $31,%xmm9 paddq %xmm15,%xmm15 pand %xmm8,%xmm9 pxor %xmm0,%xmm14 pxor %xmm9,%xmm15 movaps %xmm1,96(%rsp) subq $96,%rdx jc .Lxts_enc_short movl $16+96,%eax leaq 32(%rbp,%r10,1),%rcx subq %r10,%rax movups 16(%rbp),%xmm1 movq %rax,%r10 leaq .Lxts_magic(%rip),%r8 jmp .Lxts_enc_grandloop .align 32 .Lxts_enc_grandloop: movdqu 0(%rdi),%xmm2 movdqa %xmm0,%xmm8 movdqu 16(%rdi),%xmm3 pxor %xmm10,%xmm2 movdqu 32(%rdi),%xmm4 pxor %xmm11,%xmm3 .byte 
102,15,56,220,209 movdqu 48(%rdi),%xmm5 pxor %xmm12,%xmm4 .byte 102,15,56,220,217 movdqu 64(%rdi),%xmm6 pxor %xmm13,%xmm5 .byte 102,15,56,220,225 movdqu 80(%rdi),%xmm7 pxor %xmm15,%xmm8 movdqa 96(%rsp),%xmm9 pxor %xmm14,%xmm6 .byte 102,15,56,220,233 movups 32(%rbp),%xmm0 leaq 96(%rdi),%rdi pxor %xmm8,%xmm7 pxor %xmm9,%xmm10 .byte 102,15,56,220,241 pxor %xmm9,%xmm11 movdqa %xmm10,0(%rsp) .byte 102,15,56,220,249 movups 48(%rbp),%xmm1 pxor %xmm9,%xmm12 .byte 102,15,56,220,208 pxor %xmm9,%xmm13 movdqa %xmm11,16(%rsp) .byte 102,15,56,220,216 pxor %xmm9,%xmm14 movdqa %xmm12,32(%rsp) .byte 102,15,56,220,224 .byte 102,15,56,220,232 pxor %xmm9,%xmm8 movdqa %xmm14,64(%rsp) .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups 64(%rbp),%xmm0 movdqa %xmm8,80(%rsp) pshufd $0x5f,%xmm15,%xmm9 jmp .Lxts_enc_loop6 .align 32 .Lxts_enc_loop6: .byte 102,15,56,220,209 .byte 102,15,56,220,217 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups -64(%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,220,208 .byte 102,15,56,220,216 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 .byte 102,15,56,220,248 movups -80(%rcx,%rax,1),%xmm0 jnz .Lxts_enc_loop6 movdqa (%r8),%xmm8 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 paddq %xmm15,%xmm15 psrad $31,%xmm14 .byte 102,15,56,220,217 pand %xmm8,%xmm14 movups (%rbp),%xmm10 .byte 102,15,56,220,225 .byte 102,15,56,220,233 .byte 102,15,56,220,241 pxor %xmm14,%xmm15 movaps %xmm10,%xmm11 .byte 102,15,56,220,249 movups -64(%rcx),%xmm1 movdqa %xmm9,%xmm14 .byte 102,15,56,220,208 paddd %xmm9,%xmm9 pxor %xmm15,%xmm10 .byte 102,15,56,220,216 psrad $31,%xmm14 paddq %xmm15,%xmm15 .byte 102,15,56,220,224 .byte 102,15,56,220,232 pand %xmm8,%xmm14 movaps %xmm11,%xmm12 .byte 102,15,56,220,240 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 .byte 102,15,56,220,248 movups -48(%rcx),%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 pxor %xmm15,%xmm11 psrad $31,%xmm14 .byte 102,15,56,220,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,220,225 .byte 102,15,56,220,233 movdqa %xmm13,48(%rsp) pxor %xmm14,%xmm15 .byte 102,15,56,220,241 movaps %xmm12,%xmm13 movdqa %xmm9,%xmm14 .byte 102,15,56,220,249 movups -32(%rcx),%xmm1 paddd %xmm9,%xmm9 .byte 102,15,56,220,208 pxor %xmm15,%xmm12 psrad $31,%xmm14 .byte 102,15,56,220,216 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,220,224 .byte 102,15,56,220,232 .byte 102,15,56,220,240 pxor %xmm14,%xmm15 movaps %xmm13,%xmm14 .byte 102,15,56,220,248 movdqa %xmm9,%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 pxor %xmm15,%xmm13 psrad $31,%xmm0 .byte 102,15,56,220,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm0 .byte 102,15,56,220,225 .byte 102,15,56,220,233 pxor %xmm0,%xmm15 movups (%rbp),%xmm0 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups 16(%rbp),%xmm1 pxor %xmm15,%xmm14 .byte 102,15,56,221,84,36,0 psrad $31,%xmm9 paddq %xmm15,%xmm15 .byte 102,15,56,221,92,36,16 .byte 102,15,56,221,100,36,32 pand %xmm8,%xmm9 movq %r10,%rax .byte 102,15,56,221,108,36,48 .byte 102,15,56,221,116,36,64 .byte 102,15,56,221,124,36,80 pxor %xmm9,%xmm15 leaq 96(%rsi),%rsi movups %xmm2,-96(%rsi) movups %xmm3,-80(%rsi) movups %xmm4,-64(%rsi) movups %xmm5,-48(%rsi) movups %xmm6,-32(%rsi) movups %xmm7,-16(%rsi) subq $96,%rdx jnc .Lxts_enc_grandloop movl $16+96,%eax subl %r10d,%eax movq %rbp,%rcx shrl $4,%eax .Lxts_enc_short: movl %eax,%r10d pxor %xmm0,%xmm10 addq $96,%rdx jz .Lxts_enc_done pxor %xmm0,%xmm11 cmpq $0x20,%rdx jb .Lxts_enc_one pxor %xmm0,%xmm12 je .Lxts_enc_two pxor %xmm0,%xmm13 
cmpq $0x40,%rdx jb .Lxts_enc_three pxor %xmm0,%xmm14 je .Lxts_enc_four movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 pxor %xmm10,%xmm2 movdqu 48(%rdi),%xmm5 pxor %xmm11,%xmm3 movdqu 64(%rdi),%xmm6 leaq 80(%rdi),%rdi pxor %xmm12,%xmm4 pxor %xmm13,%xmm5 pxor %xmm14,%xmm6 pxor %xmm7,%xmm7 call _aesni_encrypt6 xorps %xmm10,%xmm2 movdqa %xmm15,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movdqu %xmm2,(%rsi) xorps %xmm13,%xmm5 movdqu %xmm3,16(%rsi) xorps %xmm14,%xmm6 movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) movdqu %xmm6,64(%rsi) leaq 80(%rsi),%rsi jmp .Lxts_enc_done .align 16 .Lxts_enc_one: movups (%rdi),%xmm2 leaq 16(%rdi),%rdi xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_7: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_7 .byte 102,15,56,221,209 xorps %xmm10,%xmm2 movdqa %xmm11,%xmm10 movups %xmm2,(%rsi) leaq 16(%rsi),%rsi jmp .Lxts_enc_done .align 16 .Lxts_enc_two: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 leaq 32(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 call _aesni_encrypt2 xorps %xmm10,%xmm2 movdqa %xmm12,%xmm10 xorps %xmm11,%xmm3 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) leaq 32(%rsi),%rsi jmp .Lxts_enc_done .align 16 .Lxts_enc_three: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 leaq 48(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 call _aesni_encrypt3 xorps %xmm10,%xmm2 movdqa %xmm13,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) leaq 48(%rsi),%rsi jmp .Lxts_enc_done .align 16 .Lxts_enc_four: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 xorps %xmm10,%xmm2 movups 48(%rdi),%xmm5 leaq 64(%rdi),%rdi xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 xorps %xmm13,%xmm5 call _aesni_encrypt4 pxor %xmm10,%xmm2 movdqa %xmm14,%xmm10 pxor %xmm11,%xmm3 pxor %xmm12,%xmm4 movdqu %xmm2,(%rsi) pxor %xmm13,%xmm5 movdqu %xmm3,16(%rsi) movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) leaq 64(%rsi),%rsi jmp .Lxts_enc_done .align 16 .Lxts_enc_done: andq $15,%r9 jz .Lxts_enc_ret movq %r9,%rdx .Lxts_enc_steal: movzbl (%rdi),%eax movzbl -16(%rsi),%ecx leaq 1(%rdi),%rdi movb %al,-16(%rsi) movb %cl,0(%rsi) leaq 1(%rsi),%rsi subq $1,%rdx jnz .Lxts_enc_steal subq %r9,%rsi movq %rbp,%rcx movl %r10d,%eax movups -16(%rsi),%xmm2 xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_8: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_8 .byte 102,15,56,221,209 xorps %xmm10,%xmm2 movups %xmm2,-16(%rsi) .Lxts_enc_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps %xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 pxor %xmm15,%xmm15 movq -8(%r11),%rbp .cfi_restore %rbp leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lxts_enc_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_xts_encrypt,.-aes_hw_xts_encrypt .globl aes_hw_xts_decrypt .hidden aes_hw_xts_decrypt .type aes_hw_xts_decrypt,@function .align 16 aes_hw_xts_decrypt: .cfi_startproc _CET_ENDBR leaq (%rsp),%r11 .cfi_def_cfa_register %r11 pushq %rbp .cfi_offset %rbp,-16 subq $112,%rsp andq $-16,%rsp movups (%r9),%xmm2 movl 
240(%r8),%eax movl 240(%rcx),%r10d movups (%r8),%xmm0 movups 16(%r8),%xmm1 leaq 32(%r8),%r8 xorps %xmm0,%xmm2 .Loop_enc1_9: .byte 102,15,56,220,209 decl %eax movups (%r8),%xmm1 leaq 16(%r8),%r8 jnz .Loop_enc1_9 .byte 102,15,56,221,209 xorl %eax,%eax testq $15,%rdx setnz %al shlq $4,%rax subq %rax,%rdx movups (%rcx),%xmm0 movq %rcx,%rbp movl %r10d,%eax shll $4,%r10d movq %rdx,%r9 andq $-16,%rdx movups 16(%rcx,%r10,1),%xmm1 movdqa .Lxts_magic(%rip),%xmm8 movdqa %xmm2,%xmm15 pshufd $0x5f,%xmm2,%xmm9 pxor %xmm0,%xmm1 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm10 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm10 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm11 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm11 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm12 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm12 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 movdqa %xmm15,%xmm13 psrad $31,%xmm14 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 pxor %xmm0,%xmm13 pxor %xmm14,%xmm15 movdqa %xmm15,%xmm14 psrad $31,%xmm9 paddq %xmm15,%xmm15 pand %xmm8,%xmm9 pxor %xmm0,%xmm14 pxor %xmm9,%xmm15 movaps %xmm1,96(%rsp) subq $96,%rdx jc .Lxts_dec_short movl $16+96,%eax leaq 32(%rbp,%r10,1),%rcx subq %r10,%rax movups 16(%rbp),%xmm1 movq %rax,%r10 leaq .Lxts_magic(%rip),%r8 jmp .Lxts_dec_grandloop .align 32 .Lxts_dec_grandloop: movdqu 0(%rdi),%xmm2 movdqa %xmm0,%xmm8 movdqu 16(%rdi),%xmm3 pxor %xmm10,%xmm2 movdqu 32(%rdi),%xmm4 pxor %xmm11,%xmm3 .byte 102,15,56,222,209 movdqu 48(%rdi),%xmm5 pxor %xmm12,%xmm4 .byte 102,15,56,222,217 movdqu 64(%rdi),%xmm6 pxor %xmm13,%xmm5 .byte 102,15,56,222,225 movdqu 80(%rdi),%xmm7 pxor %xmm15,%xmm8 movdqa 96(%rsp),%xmm9 pxor %xmm14,%xmm6 .byte 102,15,56,222,233 movups 32(%rbp),%xmm0 leaq 96(%rdi),%rdi pxor %xmm8,%xmm7 pxor %xmm9,%xmm10 .byte 102,15,56,222,241 pxor %xmm9,%xmm11 movdqa %xmm10,0(%rsp) .byte 102,15,56,222,249 movups 48(%rbp),%xmm1 pxor %xmm9,%xmm12 .byte 102,15,56,222,208 pxor %xmm9,%xmm13 movdqa %xmm11,16(%rsp) .byte 102,15,56,222,216 pxor %xmm9,%xmm14 movdqa %xmm12,32(%rsp) .byte 102,15,56,222,224 .byte 102,15,56,222,232 pxor %xmm9,%xmm8 movdqa %xmm14,64(%rsp) .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups 64(%rbp),%xmm0 movdqa %xmm8,80(%rsp) pshufd $0x5f,%xmm15,%xmm9 jmp .Lxts_dec_loop6 .align 32 .Lxts_dec_loop6: .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups -64(%rcx,%rax,1),%xmm1 addq $32,%rax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 movups -80(%rcx,%rax,1),%xmm0 jnz .Lxts_dec_loop6 movdqa (%r8),%xmm8 movdqa %xmm9,%xmm14 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 paddq %xmm15,%xmm15 psrad $31,%xmm14 .byte 102,15,56,222,217 pand %xmm8,%xmm14 movups (%rbp),%xmm10 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 pxor %xmm14,%xmm15 movaps %xmm10,%xmm11 .byte 102,15,56,222,249 movups -64(%rcx),%xmm1 movdqa %xmm9,%xmm14 .byte 102,15,56,222,208 paddd %xmm9,%xmm9 pxor %xmm15,%xmm10 .byte 102,15,56,222,216 psrad $31,%xmm14 paddq %xmm15,%xmm15 .byte 102,15,56,222,224 .byte 102,15,56,222,232 pand %xmm8,%xmm14 movaps %xmm11,%xmm12 .byte 102,15,56,222,240 pxor %xmm14,%xmm15 movdqa %xmm9,%xmm14 .byte 102,15,56,222,248 movups -48(%rcx),%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 pxor %xmm15,%xmm11 
psrad $31,%xmm14 .byte 102,15,56,222,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,222,225 .byte 102,15,56,222,233 movdqa %xmm13,48(%rsp) pxor %xmm14,%xmm15 .byte 102,15,56,222,241 movaps %xmm12,%xmm13 movdqa %xmm9,%xmm14 .byte 102,15,56,222,249 movups -32(%rcx),%xmm1 paddd %xmm9,%xmm9 .byte 102,15,56,222,208 pxor %xmm15,%xmm12 psrad $31,%xmm14 .byte 102,15,56,222,216 paddq %xmm15,%xmm15 pand %xmm8,%xmm14 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 pxor %xmm14,%xmm15 movaps %xmm13,%xmm14 .byte 102,15,56,222,248 movdqa %xmm9,%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,222,209 pxor %xmm15,%xmm13 psrad $31,%xmm0 .byte 102,15,56,222,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm0 .byte 102,15,56,222,225 .byte 102,15,56,222,233 pxor %xmm0,%xmm15 movups (%rbp),%xmm0 .byte 102,15,56,222,241 .byte 102,15,56,222,249 movups 16(%rbp),%xmm1 pxor %xmm15,%xmm14 .byte 102,15,56,223,84,36,0 psrad $31,%xmm9 paddq %xmm15,%xmm15 .byte 102,15,56,223,92,36,16 .byte 102,15,56,223,100,36,32 pand %xmm8,%xmm9 movq %r10,%rax .byte 102,15,56,223,108,36,48 .byte 102,15,56,223,116,36,64 .byte 102,15,56,223,124,36,80 pxor %xmm9,%xmm15 leaq 96(%rsi),%rsi movups %xmm2,-96(%rsi) movups %xmm3,-80(%rsi) movups %xmm4,-64(%rsi) movups %xmm5,-48(%rsi) movups %xmm6,-32(%rsi) movups %xmm7,-16(%rsi) subq $96,%rdx jnc .Lxts_dec_grandloop movl $16+96,%eax subl %r10d,%eax movq %rbp,%rcx shrl $4,%eax .Lxts_dec_short: movl %eax,%r10d pxor %xmm0,%xmm10 pxor %xmm0,%xmm11 addq $96,%rdx jz .Lxts_dec_done pxor %xmm0,%xmm12 cmpq $0x20,%rdx jb .Lxts_dec_one pxor %xmm0,%xmm13 je .Lxts_dec_two pxor %xmm0,%xmm14 cmpq $0x40,%rdx jb .Lxts_dec_three je .Lxts_dec_four movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 pxor %xmm10,%xmm2 movdqu 48(%rdi),%xmm5 pxor %xmm11,%xmm3 movdqu 64(%rdi),%xmm6 leaq 80(%rdi),%rdi pxor %xmm12,%xmm4 pxor %xmm13,%xmm5 pxor %xmm14,%xmm6 call _aesni_decrypt6 xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movdqu %xmm2,(%rsi) xorps %xmm13,%xmm5 movdqu %xmm3,16(%rsi) xorps %xmm14,%xmm6 movdqu %xmm4,32(%rsi) pxor %xmm14,%xmm14 movdqu %xmm5,48(%rsi) pcmpgtd %xmm15,%xmm14 movdqu %xmm6,64(%rsi) leaq 80(%rsi),%rsi pshufd $0x13,%xmm14,%xmm11 andq $15,%r9 jz .Lxts_dec_ret movdqa %xmm15,%xmm10 paddq %xmm15,%xmm15 pand %xmm8,%xmm11 pxor %xmm15,%xmm11 jmp .Lxts_dec_done2 .align 16 .Lxts_dec_one: movups (%rdi),%xmm2 leaq 16(%rdi),%rdi xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_10: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_10 .byte 102,15,56,223,209 xorps %xmm10,%xmm2 movdqa %xmm11,%xmm10 movups %xmm2,(%rsi) movdqa %xmm12,%xmm11 leaq 16(%rsi),%rsi jmp .Lxts_dec_done .align 16 .Lxts_dec_two: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 leaq 32(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 call _aesni_decrypt2 xorps %xmm10,%xmm2 movdqa %xmm12,%xmm10 xorps %xmm11,%xmm3 movdqa %xmm13,%xmm11 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) leaq 32(%rsi),%rsi jmp .Lxts_dec_done .align 16 .Lxts_dec_three: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 leaq 48(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 call _aesni_decrypt3 xorps %xmm10,%xmm2 movdqa %xmm13,%xmm10 xorps %xmm11,%xmm3 movdqa %xmm14,%xmm11 xorps %xmm12,%xmm4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) leaq 48(%rsi),%rsi jmp .Lxts_dec_done .align 16 .Lxts_dec_four: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 xorps %xmm10,%xmm2 movups 
48(%rdi),%xmm5 leaq 64(%rdi),%rdi xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 xorps %xmm13,%xmm5 call _aesni_decrypt4 pxor %xmm10,%xmm2 movdqa %xmm14,%xmm10 pxor %xmm11,%xmm3 movdqa %xmm15,%xmm11 pxor %xmm12,%xmm4 movdqu %xmm2,(%rsi) pxor %xmm13,%xmm5 movdqu %xmm3,16(%rsi) movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) leaq 64(%rsi),%rsi jmp .Lxts_dec_done .align 16 .Lxts_dec_done: andq $15,%r9 jz .Lxts_dec_ret .Lxts_dec_done2: movq %r9,%rdx movq %rbp,%rcx movl %r10d,%eax movups (%rdi),%xmm2 xorps %xmm11,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_11: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_11 .byte 102,15,56,223,209 xorps %xmm11,%xmm2 movups %xmm2,(%rsi) .Lxts_dec_steal: movzbl 16(%rdi),%eax movzbl (%rsi),%ecx leaq 1(%rdi),%rdi movb %al,(%rsi) movb %cl,16(%rsi) leaq 1(%rsi),%rsi subq $1,%rdx jnz .Lxts_dec_steal subq %r9,%rsi movq %rbp,%rcx movl %r10d,%eax movups (%rsi),%xmm2 xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_12: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_12 .byte 102,15,56,223,209 xorps %xmm10,%xmm2 movups %xmm2,(%rsi) .Lxts_dec_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 movaps %xmm0,0(%rsp) pxor %xmm8,%xmm8 movaps %xmm0,16(%rsp) pxor %xmm9,%xmm9 movaps %xmm0,32(%rsp) pxor %xmm10,%xmm10 movaps %xmm0,48(%rsp) pxor %xmm11,%xmm11 movaps %xmm0,64(%rsp) pxor %xmm12,%xmm12 movaps %xmm0,80(%rsp) pxor %xmm13,%xmm13 movaps %xmm0,96(%rsp) pxor %xmm14,%xmm14 pxor %xmm15,%xmm15 movq -8(%r11),%rbp .cfi_restore %rbp leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lxts_dec_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_xts_decrypt,.-aes_hw_xts_decrypt .globl aes_hw_cbc_encrypt .hidden aes_hw_cbc_encrypt .type aes_hw_cbc_encrypt,@function .align 16 aes_hw_cbc_encrypt: .cfi_startproc _CET_ENDBR testq %rdx,%rdx jz .Lcbc_ret movl 240(%rcx),%r10d movq %rcx,%r11 testl %r9d,%r9d jz .Lcbc_decrypt movups (%r8),%xmm2 movl %r10d,%eax cmpq $16,%rdx jb .Lcbc_enc_tail subq $16,%rdx jmp .Lcbc_enc_loop .align 16 .Lcbc_enc_loop: movups (%rdi),%xmm3 leaq 16(%rdi),%rdi movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 xorps %xmm0,%xmm3 leaq 32(%rcx),%rcx xorps %xmm3,%xmm2 .Loop_enc1_13: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_13 .byte 102,15,56,221,209 movl %r10d,%eax movq %r11,%rcx movups %xmm2,0(%rsi) leaq 16(%rsi),%rsi subq $16,%rdx jnc .Lcbc_enc_loop addq $16,%rdx jnz .Lcbc_enc_tail pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movups %xmm2,(%r8) pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 jmp .Lcbc_ret .Lcbc_enc_tail: movq %rdx,%rcx xchgq %rdi,%rsi .long 0x9066A4F3 movl $16,%ecx subq %rdx,%rcx xorl %eax,%eax .long 0x9066AAF3 leaq -16(%rdi),%rdi movl %r10d,%eax movq %rdi,%rsi movq %r11,%rcx xorq %rdx,%rdx jmp .Lcbc_enc_loop .align 16 .Lcbc_decrypt: cmpq $16,%rdx jne .Lcbc_decrypt_bulk movdqu (%rdi),%xmm2 movdqu (%r8),%xmm3 movdqa %xmm2,%xmm4 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_14: .byte 102,15,56,222,209 decl %r10d movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_14 .byte 102,15,56,223,209 pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 movdqu %xmm4,(%r8) xorps %xmm3,%xmm2 pxor %xmm3,%xmm3 movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp .Lcbc_ret .align 16 .Lcbc_decrypt_bulk: leaq (%rsp),%r11 .cfi_def_cfa_register %r11 pushq %rbp .cfi_offset %rbp,-16 subq 
$16,%rsp andq $-16,%rsp movq %rcx,%rbp movups (%r8),%xmm10 movl %r10d,%eax cmpq $0x50,%rdx jbe .Lcbc_dec_tail movups (%rcx),%xmm0 movdqu 0(%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqa %xmm2,%xmm11 movdqu 32(%rdi),%xmm4 movdqa %xmm3,%xmm12 movdqu 48(%rdi),%xmm5 movdqa %xmm4,%xmm13 movdqu 64(%rdi),%xmm6 movdqa %xmm5,%xmm14 movdqu 80(%rdi),%xmm7 movdqa %xmm6,%xmm15 cmpq $0x70,%rdx jbe .Lcbc_dec_six_or_seven subq $0x70,%rdx leaq 112(%rcx),%rcx jmp .Lcbc_dec_loop8_enter .align 16 .Lcbc_dec_loop8: movups %xmm9,(%rsi) leaq 16(%rsi),%rsi .Lcbc_dec_loop8_enter: movdqu 96(%rdi),%xmm8 pxor %xmm0,%xmm2 movdqu 112(%rdi),%xmm9 pxor %xmm0,%xmm3 movups 16-112(%rcx),%xmm1 pxor %xmm0,%xmm4 movq $-1,%rbp cmpq $0x70,%rdx pxor %xmm0,%xmm5 pxor %xmm0,%xmm6 pxor %xmm0,%xmm7 pxor %xmm0,%xmm8 .byte 102,15,56,222,209 pxor %xmm0,%xmm9 movups 32-112(%rcx),%xmm0 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 adcq $0,%rbp andq $128,%rbp .byte 102,68,15,56,222,201 addq %rdi,%rbp movups 48-112(%rcx),%xmm1 .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 64-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 80-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 96-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 112-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 128-112(%rcx),%xmm0 nop .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 144-112(%rcx),%xmm1 cmpl $11,%eax .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 160-112(%rcx),%xmm0 jb .Lcbc_dec_done .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 176-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 .byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 192-112(%rcx),%xmm0 je .Lcbc_dec_done .byte 102,15,56,222,209 .byte 102,15,56,222,217 .byte 102,15,56,222,225 .byte 102,15,56,222,233 .byte 102,15,56,222,241 .byte 102,15,56,222,249 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movups 208-112(%rcx),%xmm1 nop .byte 102,15,56,222,208 .byte 102,15,56,222,216 .byte 102,15,56,222,224 .byte 102,15,56,222,232 .byte 102,15,56,222,240 .byte 102,15,56,222,248 
.byte 102,68,15,56,222,192 .byte 102,68,15,56,222,200 movups 224-112(%rcx),%xmm0 jmp .Lcbc_dec_done .align 16 .Lcbc_dec_done: .byte 102,15,56,222,209 .byte 102,15,56,222,217 pxor %xmm0,%xmm10 pxor %xmm0,%xmm11 .byte 102,15,56,222,225 .byte 102,15,56,222,233 pxor %xmm0,%xmm12 pxor %xmm0,%xmm13 .byte 102,15,56,222,241 .byte 102,15,56,222,249 pxor %xmm0,%xmm14 pxor %xmm0,%xmm15 .byte 102,68,15,56,222,193 .byte 102,68,15,56,222,201 movdqu 80(%rdi),%xmm1 .byte 102,65,15,56,223,210 movdqu 96(%rdi),%xmm10 pxor %xmm0,%xmm1 .byte 102,65,15,56,223,219 pxor %xmm0,%xmm10 movdqu 112(%rdi),%xmm0 .byte 102,65,15,56,223,228 leaq 128(%rdi),%rdi movdqu 0(%rbp),%xmm11 .byte 102,65,15,56,223,237 .byte 102,65,15,56,223,246 movdqu 16(%rbp),%xmm12 movdqu 32(%rbp),%xmm13 .byte 102,65,15,56,223,255 .byte 102,68,15,56,223,193 movdqu 48(%rbp),%xmm14 movdqu 64(%rbp),%xmm15 .byte 102,69,15,56,223,202 movdqa %xmm0,%xmm10 movdqu 80(%rbp),%xmm1 movups -112(%rcx),%xmm0 movups %xmm2,(%rsi) movdqa %xmm11,%xmm2 movups %xmm3,16(%rsi) movdqa %xmm12,%xmm3 movups %xmm4,32(%rsi) movdqa %xmm13,%xmm4 movups %xmm5,48(%rsi) movdqa %xmm14,%xmm5 movups %xmm6,64(%rsi) movdqa %xmm15,%xmm6 movups %xmm7,80(%rsi) movdqa %xmm1,%xmm7 movups %xmm8,96(%rsi) leaq 112(%rsi),%rsi subq $0x80,%rdx ja .Lcbc_dec_loop8 movaps %xmm9,%xmm2 leaq -112(%rcx),%rcx addq $0x70,%rdx jle .Lcbc_dec_clear_tail_collected movups %xmm9,(%rsi) leaq 16(%rsi),%rsi cmpq $0x50,%rdx jbe .Lcbc_dec_tail movaps %xmm11,%xmm2 .Lcbc_dec_six_or_seven: cmpq $0x60,%rdx ja .Lcbc_dec_seven movaps %xmm7,%xmm8 call _aesni_decrypt6 pxor %xmm10,%xmm2 movaps %xmm8,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 pxor %xmm15,%xmm7 movdqu %xmm6,64(%rsi) pxor %xmm6,%xmm6 leaq 80(%rsi),%rsi movdqa %xmm7,%xmm2 pxor %xmm7,%xmm7 jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_seven: movups 96(%rdi),%xmm8 xorps %xmm9,%xmm9 call _aesni_decrypt8 movups 80(%rdi),%xmm9 pxor %xmm10,%xmm2 movups 96(%rdi),%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 pxor %xmm15,%xmm7 movdqu %xmm6,64(%rsi) pxor %xmm6,%xmm6 pxor %xmm9,%xmm8 movdqu %xmm7,80(%rsi) pxor %xmm7,%xmm7 leaq 96(%rsi),%rsi movdqa %xmm8,%xmm2 pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 jmp .Lcbc_dec_tail_collected .Lcbc_dec_tail: movups (%rdi),%xmm2 subq $0x10,%rdx jbe .Lcbc_dec_one movups 16(%rdi),%xmm3 movaps %xmm2,%xmm11 subq $0x10,%rdx jbe .Lcbc_dec_two movups 32(%rdi),%xmm4 movaps %xmm3,%xmm12 subq $0x10,%rdx jbe .Lcbc_dec_three movups 48(%rdi),%xmm5 movaps %xmm4,%xmm13 subq $0x10,%rdx jbe .Lcbc_dec_four movups 64(%rdi),%xmm6 movaps %xmm5,%xmm14 movaps %xmm6,%xmm15 xorps %xmm7,%xmm7 call _aesni_decrypt6 pxor %xmm10,%xmm2 movaps %xmm15,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 pxor %xmm14,%xmm6 movdqu %xmm5,48(%rsi) pxor %xmm5,%xmm5 leaq 64(%rsi),%rsi movdqa %xmm6,%xmm2 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 subq $0x10,%rdx jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_one: movaps %xmm2,%xmm11 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_dec1_15: .byte 102,15,56,222,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_dec1_15 .byte 102,15,56,223,209 xorps 
%xmm10,%xmm2 movaps %xmm11,%xmm10 jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_two: movaps %xmm3,%xmm12 call _aesni_decrypt2 pxor %xmm10,%xmm2 movaps %xmm12,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) movdqa %xmm3,%xmm2 pxor %xmm3,%xmm3 leaq 16(%rsi),%rsi jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_three: movaps %xmm4,%xmm13 call _aesni_decrypt3 pxor %xmm10,%xmm2 movaps %xmm13,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 movdqa %xmm4,%xmm2 pxor %xmm4,%xmm4 leaq 32(%rsi),%rsi jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_four: movaps %xmm5,%xmm14 call _aesni_decrypt4 pxor %xmm10,%xmm2 movaps %xmm14,%xmm10 pxor %xmm11,%xmm3 movdqu %xmm2,(%rsi) pxor %xmm12,%xmm4 movdqu %xmm3,16(%rsi) pxor %xmm3,%xmm3 pxor %xmm13,%xmm5 movdqu %xmm4,32(%rsi) pxor %xmm4,%xmm4 movdqa %xmm5,%xmm2 pxor %xmm5,%xmm5 leaq 48(%rsi),%rsi jmp .Lcbc_dec_tail_collected .align 16 .Lcbc_dec_clear_tail_collected: pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 pxor %xmm6,%xmm6 pxor %xmm7,%xmm7 pxor %xmm8,%xmm8 pxor %xmm9,%xmm9 .Lcbc_dec_tail_collected: movups %xmm10,(%r8) andq $15,%rdx jnz .Lcbc_dec_tail_partial movups %xmm2,(%rsi) pxor %xmm2,%xmm2 jmp .Lcbc_dec_ret .align 16 .Lcbc_dec_tail_partial: movaps %xmm2,(%rsp) pxor %xmm2,%xmm2 movq $16,%rcx movq %rsi,%rdi subq %rdx,%rcx leaq (%rsp),%rsi .long 0x9066A4F3 movdqa %xmm2,(%rsp) .Lcbc_dec_ret: xorps %xmm0,%xmm0 pxor %xmm1,%xmm1 movq -8(%r11),%rbp .cfi_restore %rbp leaq (%r11),%rsp .cfi_def_cfa_register %rsp .Lcbc_ret: .byte 0xf3,0xc3 .cfi_endproc .size aes_hw_cbc_encrypt,.-aes_hw_cbc_encrypt .globl aes_hw_set_decrypt_key .hidden aes_hw_set_decrypt_key .type aes_hw_set_decrypt_key,@function .align 16 aes_hw_set_decrypt_key: .cfi_startproc _CET_ENDBR .byte 0x48,0x83,0xEC,0x08 .cfi_adjust_cfa_offset 8 call __aesni_set_encrypt_key shll $4,%esi testl %eax,%eax jnz .Ldec_key_ret leaq 16(%rdx,%rsi,1),%rdi movups (%rdx),%xmm0 movups (%rdi),%xmm1 movups %xmm0,(%rdi) movups %xmm1,(%rdx) leaq 16(%rdx),%rdx leaq -16(%rdi),%rdi .Ldec_key_inverse: movups (%rdx),%xmm0 movups (%rdi),%xmm1 .byte 102,15,56,219,192 .byte 102,15,56,219,201 leaq 16(%rdx),%rdx leaq -16(%rdi),%rdi movups %xmm0,16(%rdi) movups %xmm1,-16(%rdx) cmpq %rdx,%rdi ja .Ldec_key_inverse movups (%rdx),%xmm0 .byte 102,15,56,219,192 pxor %xmm1,%xmm1 movups %xmm0,(%rdi) pxor %xmm0,%xmm0 .Ldec_key_ret: addq $8,%rsp .cfi_adjust_cfa_offset -8 .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_set_decrypt_key: .size aes_hw_set_decrypt_key,.-aes_hw_set_decrypt_key .globl aes_hw_set_encrypt_key .hidden aes_hw_set_encrypt_key .type aes_hw_set_encrypt_key,@function .align 16 aes_hw_set_encrypt_key: __aesni_set_encrypt_key: .cfi_startproc _CET_ENDBR #ifdef BORINGSSL_DISPATCH_TEST movb $1,BORINGSSL_function_hit+3(%rip) #endif .byte 0x48,0x83,0xEC,0x08 .cfi_adjust_cfa_offset 8 movq $-1,%rax testq %rdi,%rdi jz .Lenc_key_ret testq %rdx,%rdx jz .Lenc_key_ret movups (%rdi),%xmm0 xorps %xmm4,%xmm4 leaq OPENSSL_ia32cap_P(%rip),%r10 movl 4(%r10),%r10d andl $268437504,%r10d leaq 16(%rdx),%rax cmpl $256,%esi je .L14rounds cmpl $192,%esi je .L12rounds cmpl $128,%esi jne .Lbad_keybits .L10rounds: movl $9,%esi cmpl $268435456,%r10d je .L10rounds_alt movups %xmm0,(%rdx) .byte 102,15,58,223,200,1 call .Lkey_expansion_128_cold .byte 102,15,58,223,200,2 call .Lkey_expansion_128 .byte 102,15,58,223,200,4 call .Lkey_expansion_128 .byte 102,15,58,223,200,8 call .Lkey_expansion_128 .byte 102,15,58,223,200,16 call .Lkey_expansion_128 .byte 102,15,58,223,200,32 call .Lkey_expansion_128 .byte 
102,15,58,223,200,64 call .Lkey_expansion_128 .byte 102,15,58,223,200,128 call .Lkey_expansion_128 .byte 102,15,58,223,200,27 call .Lkey_expansion_128 .byte 102,15,58,223,200,54 call .Lkey_expansion_128 movups %xmm0,(%rax) movl %esi,80(%rax) xorl %eax,%eax jmp .Lenc_key_ret .align 16 .L10rounds_alt: movdqa .Lkey_rotate(%rip),%xmm5 movl $8,%r10d movdqa .Lkey_rcon1(%rip),%xmm4 movdqa %xmm0,%xmm2 movdqu %xmm0,(%rdx) jmp .Loop_key128 .align 16 .Loop_key128: .byte 102,15,56,0,197 .byte 102,15,56,221,196 pslld $1,%xmm4 leaq 16(%rax),%rax movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,-16(%rax) movdqa %xmm0,%xmm2 decl %r10d jnz .Loop_key128 movdqa .Lkey_rcon1b(%rip),%xmm4 .byte 102,15,56,0,197 .byte 102,15,56,221,196 pslld $1,%xmm4 movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,(%rax) movdqa %xmm0,%xmm2 .byte 102,15,56,0,197 .byte 102,15,56,221,196 movdqa %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm2,%xmm3 pslldq $4,%xmm2 pxor %xmm3,%xmm2 pxor %xmm2,%xmm0 movdqu %xmm0,16(%rax) movl %esi,96(%rax) xorl %eax,%eax jmp .Lenc_key_ret .align 16 .L12rounds: movq 16(%rdi),%xmm2 movl $11,%esi cmpl $268435456,%r10d je .L12rounds_alt movups %xmm0,(%rdx) .byte 102,15,58,223,202,1 call .Lkey_expansion_192a_cold .byte 102,15,58,223,202,2 call .Lkey_expansion_192b .byte 102,15,58,223,202,4 call .Lkey_expansion_192a .byte 102,15,58,223,202,8 call .Lkey_expansion_192b .byte 102,15,58,223,202,16 call .Lkey_expansion_192a .byte 102,15,58,223,202,32 call .Lkey_expansion_192b .byte 102,15,58,223,202,64 call .Lkey_expansion_192a .byte 102,15,58,223,202,128 call .Lkey_expansion_192b movups %xmm0,(%rax) movl %esi,48(%rax) xorq %rax,%rax jmp .Lenc_key_ret .align 16 .L12rounds_alt: movdqa .Lkey_rotate192(%rip),%xmm5 movdqa .Lkey_rcon1(%rip),%xmm4 movl $8,%r10d movdqu %xmm0,(%rdx) jmp .Loop_key192 .align 16 .Loop_key192: movq %xmm2,0(%rax) movdqa %xmm2,%xmm1 .byte 102,15,56,0,213 .byte 102,15,56,221,212 pslld $1,%xmm4 leaq 24(%rax),%rax movdqa %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm3,%xmm0 pshufd $0xff,%xmm0,%xmm3 pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pxor %xmm2,%xmm0 pxor %xmm3,%xmm2 movdqu %xmm0,-16(%rax) decl %r10d jnz .Loop_key192 movl %esi,32(%rax) xorl %eax,%eax jmp .Lenc_key_ret .align 16 .L14rounds: movups 16(%rdi),%xmm2 movl $13,%esi leaq 16(%rax),%rax cmpl $268435456,%r10d je .L14rounds_alt movups %xmm0,(%rdx) movups %xmm2,16(%rdx) .byte 102,15,58,223,202,1 call .Lkey_expansion_256a_cold .byte 102,15,58,223,200,1 call .Lkey_expansion_256b .byte 102,15,58,223,202,2 call .Lkey_expansion_256a .byte 102,15,58,223,200,2 call .Lkey_expansion_256b .byte 102,15,58,223,202,4 call .Lkey_expansion_256a .byte 102,15,58,223,200,4 call .Lkey_expansion_256b .byte 102,15,58,223,202,8 call .Lkey_expansion_256a .byte 102,15,58,223,200,8 call .Lkey_expansion_256b .byte 102,15,58,223,202,16 call .Lkey_expansion_256a .byte 102,15,58,223,200,16 call .Lkey_expansion_256b .byte 102,15,58,223,202,32 call .Lkey_expansion_256a .byte 102,15,58,223,200,32 call .Lkey_expansion_256b .byte 102,15,58,223,202,64 call .Lkey_expansion_256a movups %xmm0,(%rax) movl %esi,16(%rax) xorq %rax,%rax jmp .Lenc_key_ret .align 16 .L14rounds_alt: movdqa .Lkey_rotate(%rip),%xmm5 movdqa .Lkey_rcon1(%rip),%xmm4 movl $7,%r10d movdqu %xmm0,0(%rdx) movdqa %xmm2,%xmm1 
movdqu %xmm2,16(%rdx) jmp .Loop_key256 .align 16 .Loop_key256: .byte 102,15,56,0,213 .byte 102,15,56,221,212 movdqa %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm0,%xmm3 pslldq $4,%xmm0 pxor %xmm3,%xmm0 pslld $1,%xmm4 pxor %xmm2,%xmm0 movdqu %xmm0,(%rax) decl %r10d jz .Ldone_key256 pshufd $0xff,%xmm0,%xmm2 pxor %xmm3,%xmm3 .byte 102,15,56,221,211 movdqa %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm1,%xmm3 pslldq $4,%xmm1 pxor %xmm3,%xmm1 pxor %xmm1,%xmm2 movdqu %xmm2,16(%rax) leaq 32(%rax),%rax movdqa %xmm2,%xmm1 jmp .Loop_key256 .Ldone_key256: movl %esi,16(%rax) xorl %eax,%eax jmp .Lenc_key_ret .align 16 .Lbad_keybits: movq $-2,%rax .Lenc_key_ret: pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 pxor %xmm3,%xmm3 pxor %xmm4,%xmm4 pxor %xmm5,%xmm5 addq $8,%rsp .cfi_adjust_cfa_offset -8 .byte 0xf3,0xc3 .cfi_endproc .LSEH_end_set_encrypt_key: .align 16 .Lkey_expansion_128: movups %xmm0,(%rax) leaq 16(%rax),%rax .Lkey_expansion_128_cold: shufps $16,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $255,%xmm1,%xmm1 xorps %xmm1,%xmm0 .byte 0xf3,0xc3 .align 16 .Lkey_expansion_192a: movups %xmm0,(%rax) leaq 16(%rax),%rax .Lkey_expansion_192a_cold: movaps %xmm2,%xmm5 .Lkey_expansion_192b_warm: shufps $16,%xmm0,%xmm4 movdqa %xmm2,%xmm3 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 pslldq $4,%xmm3 xorps %xmm4,%xmm0 pshufd $85,%xmm1,%xmm1 pxor %xmm3,%xmm2 pxor %xmm1,%xmm0 pshufd $255,%xmm0,%xmm3 pxor %xmm3,%xmm2 .byte 0xf3,0xc3 .align 16 .Lkey_expansion_192b: movaps %xmm0,%xmm3 shufps $68,%xmm0,%xmm5 movups %xmm5,(%rax) shufps $78,%xmm2,%xmm3 movups %xmm3,16(%rax) leaq 32(%rax),%rax jmp .Lkey_expansion_192b_warm .align 16 .Lkey_expansion_256a: movups %xmm2,(%rax) leaq 16(%rax),%rax .Lkey_expansion_256a_cold: shufps $16,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $140,%xmm0,%xmm4 xorps %xmm4,%xmm0 shufps $255,%xmm1,%xmm1 xorps %xmm1,%xmm0 .byte 0xf3,0xc3 .align 16 .Lkey_expansion_256b: movups %xmm0,(%rax) leaq 16(%rax),%rax shufps $16,%xmm2,%xmm4 xorps %xmm4,%xmm2 shufps $140,%xmm2,%xmm4 xorps %xmm4,%xmm2 shufps $170,%xmm1,%xmm1 xorps %xmm1,%xmm2 .byte 0xf3,0xc3 .size aes_hw_set_encrypt_key,.-aes_hw_set_encrypt_key .size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key .section .rodata .align 64 .Lbswap_mask: .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 .Lincrement32: .long 6,6,6,0 .Lincrement64: .long 1,0,0,0 .Lxts_magic: .long 0x87,0,1,0 .Lincrement1: .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 .Lkey_rotate: .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d .Lkey_rotate192: .long 0x04070605,0x04070605,0x04070605,0x04070605 .Lkey_rcon1: .long 1,1,1,1 .Lkey_rcon1b: .long 0x1b,0x1b,0x1b,0x1b .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 64 .text #endif
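The AES-NI listing above ends here. Purely as an orientation aid (not part of the vendored sources), the C sketch below shows how the two most self-contained entry points, aes_hw_set_encrypt_key and aes_hw_ctr32_encrypt_blocks, might be driven directly. The extern prototypes are assumptions inferred from the System V register usage visible in the assembly (for the CTR routine: rdi = in, rsi = out, rdx = block count, rcx = AES_KEY, r8 = 16-byte counter block; for key setup: rdi = user key, esi = key bits, rdx = AES_KEY), not something quoted from a header in this dump; real applications reach these kernels through AWS-LC's public EVP/AES APIs, which also handle the CPU-feature dispatch hinted at by the OPENSSL_ia32cap_P reference at the top of the file.

/*
 * Hypothetical usage sketch for the AES-NI kernels above. Not part of the
 * generated file; the prototypes below are inferred from register usage,
 * not copied from an AWS-LC header.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#include <openssl/aes.h>   /* AES_KEY: 240 bytes of round keys, then the round count */

extern int aes_hw_set_encrypt_key(const uint8_t *user_key, int bits, AES_KEY *key);
extern void aes_hw_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                        const AES_KEY *key, const uint8_t ivec[16]);

int main(void) {
  uint8_t user_key[16] = {0};  /* 128-bit key: the .L10rounds path of the key schedule */
  uint8_t counter[16] = {0};   /* full 16-byte initial counter block; the 4 bytes at
                                  offset 12 are treated as a big-endian block counter */
  uint8_t in[64] = {0}, out[64];
  AES_KEY key;

  /* Mirrors the return values set in the assembly: 0 on success, -1 for a NULL
     pointer, -2 for unsupported key sizes (.Lbad_keybits). */
  if (aes_hw_set_encrypt_key(user_key, 128, &key) != 0) {
    return 1;
  }

  /* The third argument counts 16-byte blocks, not bytes: the code compares
     %rdx against 1 and 8 to choose the single-block or eight-wide path. */
  aes_hw_ctr32_encrypt_blocks(in, out, sizeof(in) / 16, &key, counter);

  printf("first keystream byte: %02x\n", out[0]);
  return 0;
}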
marvin-hansen/iggy-streaming-system
33,587
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/chacha/chacha-x86_64.S
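The row below vendors the matching x86-64 ChaCha20 kernels (ChaCha20_ctr32_nohw plus the SSSE3 and 4-way SSSE3 variants). Before the listing, a hypothetical C-level view of the scalar entry point; the prototype is an assumption inferred from the register usage in the code that follows (rdi = out, rsi = in, rdx = length in bytes, rcx = 32-byte key, r8 = 16-byte counter/nonce block), not something quoted from a header in this dump, and real callers should use the library's public ChaCha20-Poly1305 API instead.

/*
 * Hypothetical sketch only; the extern prototype is inferred from the
 * chacha-x86_64.S register usage below, not copied from an AWS-LC header.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

extern void ChaCha20_ctr32_nohw(uint8_t *out, const uint8_t *in, size_t in_len,
                                const uint32_t key[8], const uint32_t counter[4]);

int main(void) {
  const uint32_t key[8] = {0};              /* 256-bit key as eight 32-bit words */
  const uint32_t counter[4] = {1, 0, 0, 0}; /* word 0 is the block counter (the
                                               kernel bumps it via the .Lone constant) */
  uint8_t in[128] = {0}, out[128];

  /* The kernel XORs its keystream into the input, so an all-zero input yields
     the raw keystream. Lengths that are not a multiple of 64 fall through to
     the byte-by-byte .Loop_tail path. */
  ChaCha20_ctr32_nohw(out, in, sizeof(in), key, counter);

  printf("keystream[0] = %02x\n", out[0]);
  return 0;
}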
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .section .rodata .align 64 .Lzero: .long 0,0,0,0 .Lone: .long 1,0,0,0 .Linc: .long 0,1,2,3 .Lfour: .long 4,4,4,4 .Lincy: .long 0,2,4,6,1,3,5,7 .Leight: .long 8,8,8,8,8,8,8,8 .Lrot16: .byte 0x2,0x3,0x0,0x1, 0x6,0x7,0x4,0x5, 0xa,0xb,0x8,0x9, 0xe,0xf,0xc,0xd .Lrot24: .byte 0x3,0x0,0x1,0x2, 0x7,0x4,0x5,0x6, 0xb,0x8,0x9,0xa, 0xf,0xc,0xd,0xe .Lsigma: .byte 101,120,112,97,110,100,32,51,50,45,98,121,116,101,32,107,0 .align 64 .Lzeroz: .long 0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0 .Lfourz: .long 4,0,0,0, 4,0,0,0, 4,0,0,0, 4,0,0,0 .Lincz: .long 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15 .Lsixteen: .long 16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16 .byte 67,104,97,67,104,97,50,48,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .globl ChaCha20_ctr32_nohw .hidden ChaCha20_ctr32_nohw .type ChaCha20_ctr32_nohw,@function .align 64 ChaCha20_ctr32_nohw: .cfi_startproc _CET_ENDBR pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset r15,-56 subq $64+24,%rsp .cfi_adjust_cfa_offset 88 .Lctr32_body: movdqu (%rcx),%xmm1 movdqu 16(%rcx),%xmm2 movdqu (%r8),%xmm3 movdqa .Lone(%rip),%xmm4 movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) movq %rdx,%rbp jmp .Loop_outer .align 32 .Loop_outer: movl $0x61707865,%eax movl $0x3320646e,%ebx movl $0x79622d32,%ecx movl $0x6b206574,%edx movl 16(%rsp),%r8d movl 20(%rsp),%r9d movl 24(%rsp),%r10d movl 28(%rsp),%r11d movd %xmm3,%r12d movl 52(%rsp),%r13d movl 56(%rsp),%r14d movl 60(%rsp),%r15d movq %rbp,64+0(%rsp) movl $10,%ebp movq %rsi,64+8(%rsp) .byte 102,72,15,126,214 movq %rdi,64+16(%rsp) movq %rsi,%rdi shrq $32,%rdi jmp .Loop .align 32 .Loop: addl %r8d,%eax xorl %eax,%r12d roll $16,%r12d addl %r9d,%ebx xorl %ebx,%r13d roll $16,%r13d addl %r12d,%esi xorl %esi,%r8d roll $12,%r8d addl %r13d,%edi xorl %edi,%r9d roll $12,%r9d addl %r8d,%eax xorl %eax,%r12d roll $8,%r12d addl %r9d,%ebx xorl %ebx,%r13d roll $8,%r13d addl %r12d,%esi xorl %esi,%r8d roll $7,%r8d addl %r13d,%edi xorl %edi,%r9d roll $7,%r9d movl %esi,32(%rsp) movl %edi,36(%rsp) movl 40(%rsp),%esi movl 44(%rsp),%edi addl %r10d,%ecx xorl %ecx,%r14d roll $16,%r14d addl %r11d,%edx xorl %edx,%r15d roll $16,%r15d addl %r14d,%esi xorl %esi,%r10d roll $12,%r10d addl %r15d,%edi xorl %edi,%r11d roll $12,%r11d addl %r10d,%ecx xorl %ecx,%r14d roll $8,%r14d addl %r11d,%edx xorl %edx,%r15d roll $8,%r15d addl %r14d,%esi xorl %esi,%r10d roll $7,%r10d addl %r15d,%edi xorl %edi,%r11d roll $7,%r11d addl %r9d,%eax xorl %eax,%r15d roll $16,%r15d addl %r10d,%ebx xorl %ebx,%r12d roll $16,%r12d addl %r15d,%esi xorl %esi,%r9d roll $12,%r9d addl %r12d,%edi xorl %edi,%r10d roll $12,%r10d addl %r9d,%eax xorl %eax,%r15d roll $8,%r15d addl %r10d,%ebx xorl %ebx,%r12d roll $8,%r12d addl %r15d,%esi xorl %esi,%r9d roll $7,%r9d addl %r12d,%edi xorl %edi,%r10d roll $7,%r10d movl %esi,40(%rsp) movl %edi,44(%rsp) movl 32(%rsp),%esi movl 36(%rsp),%edi addl %r11d,%ecx xorl %ecx,%r13d roll $16,%r13d addl %r8d,%edx xorl %edx,%r14d roll $16,%r14d 
addl %r13d,%esi xorl %esi,%r11d roll $12,%r11d addl %r14d,%edi xorl %edi,%r8d roll $12,%r8d addl %r11d,%ecx xorl %ecx,%r13d roll $8,%r13d addl %r8d,%edx xorl %edx,%r14d roll $8,%r14d addl %r13d,%esi xorl %esi,%r11d roll $7,%r11d addl %r14d,%edi xorl %edi,%r8d roll $7,%r8d decl %ebp jnz .Loop movl %edi,36(%rsp) movl %esi,32(%rsp) movq 64(%rsp),%rbp movdqa %xmm2,%xmm1 movq 64+8(%rsp),%rsi paddd %xmm4,%xmm3 movq 64+16(%rsp),%rdi addl $0x61707865,%eax addl $0x3320646e,%ebx addl $0x79622d32,%ecx addl $0x6b206574,%edx addl 16(%rsp),%r8d addl 20(%rsp),%r9d addl 24(%rsp),%r10d addl 28(%rsp),%r11d addl 48(%rsp),%r12d addl 52(%rsp),%r13d addl 56(%rsp),%r14d addl 60(%rsp),%r15d paddd 32(%rsp),%xmm1 cmpq $64,%rbp jb .Ltail xorl 0(%rsi),%eax xorl 4(%rsi),%ebx xorl 8(%rsi),%ecx xorl 12(%rsi),%edx xorl 16(%rsi),%r8d xorl 20(%rsi),%r9d xorl 24(%rsi),%r10d xorl 28(%rsi),%r11d movdqu 32(%rsi),%xmm0 xorl 48(%rsi),%r12d xorl 52(%rsi),%r13d xorl 56(%rsi),%r14d xorl 60(%rsi),%r15d leaq 64(%rsi),%rsi pxor %xmm1,%xmm0 movdqa %xmm2,32(%rsp) movd %xmm3,48(%rsp) movl %eax,0(%rdi) movl %ebx,4(%rdi) movl %ecx,8(%rdi) movl %edx,12(%rdi) movl %r8d,16(%rdi) movl %r9d,20(%rdi) movl %r10d,24(%rdi) movl %r11d,28(%rdi) movdqu %xmm0,32(%rdi) movl %r12d,48(%rdi) movl %r13d,52(%rdi) movl %r14d,56(%rdi) movl %r15d,60(%rdi) leaq 64(%rdi),%rdi subq $64,%rbp jnz .Loop_outer jmp .Ldone .align 16 .Ltail: movl %eax,0(%rsp) movl %ebx,4(%rsp) xorq %rbx,%rbx movl %ecx,8(%rsp) movl %edx,12(%rsp) movl %r8d,16(%rsp) movl %r9d,20(%rsp) movl %r10d,24(%rsp) movl %r11d,28(%rsp) movdqa %xmm1,32(%rsp) movl %r12d,48(%rsp) movl %r13d,52(%rsp) movl %r14d,56(%rsp) movl %r15d,60(%rsp) .Loop_tail: movzbl (%rsi,%rbx,1),%eax movzbl (%rsp,%rbx,1),%edx leaq 1(%rbx),%rbx xorl %edx,%eax movb %al,-1(%rdi,%rbx,1) decq %rbp jnz .Loop_tail .Ldone: leaq 64+24+48(%rsp),%rsi movq -48(%rsi),%r15 .cfi_restore r15 movq -40(%rsi),%r14 .cfi_restore r14 movq -32(%rsi),%r13 .cfi_restore r13 movq -24(%rsi),%r12 .cfi_restore r12 movq -16(%rsi),%rbp .cfi_restore rbp movq -8(%rsi),%rbx .cfi_restore rbx leaq (%rsi),%rsp .cfi_adjust_cfa_offset -136 .Lno_data: .byte 0xf3,0xc3 .cfi_endproc .size ChaCha20_ctr32_nohw,.-ChaCha20_ctr32_nohw .globl ChaCha20_ctr32_ssse3 .hidden ChaCha20_ctr32_ssse3 .type ChaCha20_ctr32_ssse3,@function .align 32 ChaCha20_ctr32_ssse3: .cfi_startproc _CET_ENDBR movq %rsp,%r9 .cfi_def_cfa_register r9 subq $64+8,%rsp movdqa .Lsigma(%rip),%xmm0 movdqu (%rcx),%xmm1 movdqu 16(%rcx),%xmm2 movdqu (%r8),%xmm3 movdqa .Lrot16(%rip),%xmm6 movdqa .Lrot24(%rip),%xmm7 movdqa %xmm0,0(%rsp) movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) movq $10,%r8 jmp .Loop_ssse3 .align 32 .Loop_outer_ssse3: movdqa .Lone(%rip),%xmm3 movdqa 0(%rsp),%xmm0 movdqa 16(%rsp),%xmm1 movdqa 32(%rsp),%xmm2 paddd 48(%rsp),%xmm3 movq $10,%r8 movdqa %xmm3,48(%rsp) jmp .Loop_ssse3 .align 32 .Loop_ssse3: paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,222 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $20,%xmm1 pslld $12,%xmm4 por %xmm4,%xmm1 paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,223 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $25,%xmm1 pslld $7,%xmm4 por %xmm4,%xmm1 pshufd $78,%xmm2,%xmm2 pshufd $57,%xmm1,%xmm1 pshufd $147,%xmm3,%xmm3 nop paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,222 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $20,%xmm1 pslld $12,%xmm4 por %xmm4,%xmm1 paddd %xmm1,%xmm0 pxor %xmm0,%xmm3 .byte 102,15,56,0,223 paddd %xmm3,%xmm2 pxor %xmm2,%xmm1 movdqa %xmm1,%xmm4 psrld $25,%xmm1 pslld 
$7,%xmm4 por %xmm4,%xmm1 pshufd $78,%xmm2,%xmm2 pshufd $147,%xmm1,%xmm1 pshufd $57,%xmm3,%xmm3 decq %r8 jnz .Loop_ssse3 paddd 0(%rsp),%xmm0 paddd 16(%rsp),%xmm1 paddd 32(%rsp),%xmm2 paddd 48(%rsp),%xmm3 cmpq $64,%rdx jb .Ltail_ssse3 movdqu 0(%rsi),%xmm4 movdqu 16(%rsi),%xmm5 pxor %xmm4,%xmm0 movdqu 32(%rsi),%xmm4 pxor %xmm5,%xmm1 movdqu 48(%rsi),%xmm5 leaq 64(%rsi),%rsi pxor %xmm4,%xmm2 pxor %xmm5,%xmm3 movdqu %xmm0,0(%rdi) movdqu %xmm1,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm3,48(%rdi) leaq 64(%rdi),%rdi subq $64,%rdx jnz .Loop_outer_ssse3 jmp .Ldone_ssse3 .align 16 .Ltail_ssse3: movdqa %xmm0,0(%rsp) movdqa %xmm1,16(%rsp) movdqa %xmm2,32(%rsp) movdqa %xmm3,48(%rsp) xorq %r8,%r8 .Loop_tail_ssse3: movzbl (%rsi,%r8,1),%eax movzbl (%rsp,%r8,1),%ecx leaq 1(%r8),%r8 xorl %ecx,%eax movb %al,-1(%rdi,%r8,1) decq %rdx jnz .Loop_tail_ssse3 .Ldone_ssse3: leaq (%r9),%rsp .cfi_def_cfa_register rsp .Lssse3_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ChaCha20_ctr32_ssse3,.-ChaCha20_ctr32_ssse3 .globl ChaCha20_ctr32_ssse3_4x .hidden ChaCha20_ctr32_ssse3_4x .type ChaCha20_ctr32_ssse3_4x,@function .align 32 ChaCha20_ctr32_ssse3_4x: .cfi_startproc _CET_ENDBR movq %rsp,%r9 .cfi_def_cfa_register r9 movq %r10,%r11 subq $0x140+8,%rsp movdqa .Lsigma(%rip),%xmm11 movdqu (%rcx),%xmm15 movdqu 16(%rcx),%xmm7 movdqu (%r8),%xmm3 leaq 256(%rsp),%rcx leaq .Lrot16(%rip),%r10 leaq .Lrot24(%rip),%r11 pshufd $0x00,%xmm11,%xmm8 pshufd $0x55,%xmm11,%xmm9 movdqa %xmm8,64(%rsp) pshufd $0xaa,%xmm11,%xmm10 movdqa %xmm9,80(%rsp) pshufd $0xff,%xmm11,%xmm11 movdqa %xmm10,96(%rsp) movdqa %xmm11,112(%rsp) pshufd $0x00,%xmm15,%xmm12 pshufd $0x55,%xmm15,%xmm13 movdqa %xmm12,128-256(%rcx) pshufd $0xaa,%xmm15,%xmm14 movdqa %xmm13,144-256(%rcx) pshufd $0xff,%xmm15,%xmm15 movdqa %xmm14,160-256(%rcx) movdqa %xmm15,176-256(%rcx) pshufd $0x00,%xmm7,%xmm4 pshufd $0x55,%xmm7,%xmm5 movdqa %xmm4,192-256(%rcx) pshufd $0xaa,%xmm7,%xmm6 movdqa %xmm5,208-256(%rcx) pshufd $0xff,%xmm7,%xmm7 movdqa %xmm6,224-256(%rcx) movdqa %xmm7,240-256(%rcx) pshufd $0x00,%xmm3,%xmm0 pshufd $0x55,%xmm3,%xmm1 paddd .Linc(%rip),%xmm0 pshufd $0xaa,%xmm3,%xmm2 movdqa %xmm1,272-256(%rcx) pshufd $0xff,%xmm3,%xmm3 movdqa %xmm2,288-256(%rcx) movdqa %xmm3,304-256(%rcx) jmp .Loop_enter4x .align 32 .Loop_outer4x: movdqa 64(%rsp),%xmm8 movdqa 80(%rsp),%xmm9 movdqa 96(%rsp),%xmm10 movdqa 112(%rsp),%xmm11 movdqa 128-256(%rcx),%xmm12 movdqa 144-256(%rcx),%xmm13 movdqa 160-256(%rcx),%xmm14 movdqa 176-256(%rcx),%xmm15 movdqa 192-256(%rcx),%xmm4 movdqa 208-256(%rcx),%xmm5 movdqa 224-256(%rcx),%xmm6 movdqa 240-256(%rcx),%xmm7 movdqa 256-256(%rcx),%xmm0 movdqa 272-256(%rcx),%xmm1 movdqa 288-256(%rcx),%xmm2 movdqa 304-256(%rcx),%xmm3 paddd .Lfour(%rip),%xmm0 .Loop_enter4x: movdqa %xmm6,32(%rsp) movdqa %xmm7,48(%rsp) movdqa (%r10),%xmm7 movl $10,%eax movdqa %xmm0,256-256(%rcx) jmp .Loop4x .align 32 .Loop4x: paddd %xmm12,%xmm8 paddd %xmm13,%xmm9 pxor %xmm8,%xmm0 pxor %xmm9,%xmm1 .byte 102,15,56,0,199 .byte 102,15,56,0,207 paddd %xmm0,%xmm4 paddd %xmm1,%xmm5 pxor %xmm4,%xmm12 pxor %xmm5,%xmm13 movdqa %xmm12,%xmm6 pslld $12,%xmm12 psrld $20,%xmm6 movdqa %xmm13,%xmm7 pslld $12,%xmm13 por %xmm6,%xmm12 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm13 paddd %xmm12,%xmm8 paddd %xmm13,%xmm9 pxor %xmm8,%xmm0 pxor %xmm9,%xmm1 .byte 102,15,56,0,198 .byte 102,15,56,0,206 paddd %xmm0,%xmm4 paddd %xmm1,%xmm5 pxor %xmm4,%xmm12 pxor %xmm5,%xmm13 movdqa %xmm12,%xmm7 pslld $7,%xmm12 psrld $25,%xmm7 movdqa %xmm13,%xmm6 pslld $7,%xmm13 por %xmm7,%xmm12 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm13 
movdqa %xmm4,0(%rsp) movdqa %xmm5,16(%rsp) movdqa 32(%rsp),%xmm4 movdqa 48(%rsp),%xmm5 paddd %xmm14,%xmm10 paddd %xmm15,%xmm11 pxor %xmm10,%xmm2 pxor %xmm11,%xmm3 .byte 102,15,56,0,215 .byte 102,15,56,0,223 paddd %xmm2,%xmm4 paddd %xmm3,%xmm5 pxor %xmm4,%xmm14 pxor %xmm5,%xmm15 movdqa %xmm14,%xmm6 pslld $12,%xmm14 psrld $20,%xmm6 movdqa %xmm15,%xmm7 pslld $12,%xmm15 por %xmm6,%xmm14 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm15 paddd %xmm14,%xmm10 paddd %xmm15,%xmm11 pxor %xmm10,%xmm2 pxor %xmm11,%xmm3 .byte 102,15,56,0,214 .byte 102,15,56,0,222 paddd %xmm2,%xmm4 paddd %xmm3,%xmm5 pxor %xmm4,%xmm14 pxor %xmm5,%xmm15 movdqa %xmm14,%xmm7 pslld $7,%xmm14 psrld $25,%xmm7 movdqa %xmm15,%xmm6 pslld $7,%xmm15 por %xmm7,%xmm14 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm15 paddd %xmm13,%xmm8 paddd %xmm14,%xmm9 pxor %xmm8,%xmm3 pxor %xmm9,%xmm0 .byte 102,15,56,0,223 .byte 102,15,56,0,199 paddd %xmm3,%xmm4 paddd %xmm0,%xmm5 pxor %xmm4,%xmm13 pxor %xmm5,%xmm14 movdqa %xmm13,%xmm6 pslld $12,%xmm13 psrld $20,%xmm6 movdqa %xmm14,%xmm7 pslld $12,%xmm14 por %xmm6,%xmm13 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm14 paddd %xmm13,%xmm8 paddd %xmm14,%xmm9 pxor %xmm8,%xmm3 pxor %xmm9,%xmm0 .byte 102,15,56,0,222 .byte 102,15,56,0,198 paddd %xmm3,%xmm4 paddd %xmm0,%xmm5 pxor %xmm4,%xmm13 pxor %xmm5,%xmm14 movdqa %xmm13,%xmm7 pslld $7,%xmm13 psrld $25,%xmm7 movdqa %xmm14,%xmm6 pslld $7,%xmm14 por %xmm7,%xmm13 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm14 movdqa %xmm4,32(%rsp) movdqa %xmm5,48(%rsp) movdqa 0(%rsp),%xmm4 movdqa 16(%rsp),%xmm5 paddd %xmm15,%xmm10 paddd %xmm12,%xmm11 pxor %xmm10,%xmm1 pxor %xmm11,%xmm2 .byte 102,15,56,0,207 .byte 102,15,56,0,215 paddd %xmm1,%xmm4 paddd %xmm2,%xmm5 pxor %xmm4,%xmm15 pxor %xmm5,%xmm12 movdqa %xmm15,%xmm6 pslld $12,%xmm15 psrld $20,%xmm6 movdqa %xmm12,%xmm7 pslld $12,%xmm12 por %xmm6,%xmm15 psrld $20,%xmm7 movdqa (%r11),%xmm6 por %xmm7,%xmm12 paddd %xmm15,%xmm10 paddd %xmm12,%xmm11 pxor %xmm10,%xmm1 pxor %xmm11,%xmm2 .byte 102,15,56,0,206 .byte 102,15,56,0,214 paddd %xmm1,%xmm4 paddd %xmm2,%xmm5 pxor %xmm4,%xmm15 pxor %xmm5,%xmm12 movdqa %xmm15,%xmm7 pslld $7,%xmm15 psrld $25,%xmm7 movdqa %xmm12,%xmm6 pslld $7,%xmm12 por %xmm7,%xmm15 psrld $25,%xmm6 movdqa (%r10),%xmm7 por %xmm6,%xmm12 decl %eax jnz .Loop4x paddd 64(%rsp),%xmm8 paddd 80(%rsp),%xmm9 paddd 96(%rsp),%xmm10 paddd 112(%rsp),%xmm11 movdqa %xmm8,%xmm6 punpckldq %xmm9,%xmm8 movdqa %xmm10,%xmm7 punpckldq %xmm11,%xmm10 punpckhdq %xmm9,%xmm6 punpckhdq %xmm11,%xmm7 movdqa %xmm8,%xmm9 punpcklqdq %xmm10,%xmm8 movdqa %xmm6,%xmm11 punpcklqdq %xmm7,%xmm6 punpckhqdq %xmm10,%xmm9 punpckhqdq %xmm7,%xmm11 paddd 128-256(%rcx),%xmm12 paddd 144-256(%rcx),%xmm13 paddd 160-256(%rcx),%xmm14 paddd 176-256(%rcx),%xmm15 movdqa %xmm8,0(%rsp) movdqa %xmm9,16(%rsp) movdqa 32(%rsp),%xmm8 movdqa 48(%rsp),%xmm9 movdqa %xmm12,%xmm10 punpckldq %xmm13,%xmm12 movdqa %xmm14,%xmm7 punpckldq %xmm15,%xmm14 punpckhdq %xmm13,%xmm10 punpckhdq %xmm15,%xmm7 movdqa %xmm12,%xmm13 punpcklqdq %xmm14,%xmm12 movdqa %xmm10,%xmm15 punpcklqdq %xmm7,%xmm10 punpckhqdq %xmm14,%xmm13 punpckhqdq %xmm7,%xmm15 paddd 192-256(%rcx),%xmm4 paddd 208-256(%rcx),%xmm5 paddd 224-256(%rcx),%xmm8 paddd 240-256(%rcx),%xmm9 movdqa %xmm6,32(%rsp) movdqa %xmm11,48(%rsp) movdqa %xmm4,%xmm14 punpckldq %xmm5,%xmm4 movdqa %xmm8,%xmm7 punpckldq %xmm9,%xmm8 punpckhdq %xmm5,%xmm14 punpckhdq %xmm9,%xmm7 movdqa %xmm4,%xmm5 punpcklqdq %xmm8,%xmm4 movdqa %xmm14,%xmm9 punpcklqdq %xmm7,%xmm14 punpckhqdq %xmm8,%xmm5 punpckhqdq %xmm7,%xmm9 paddd 256-256(%rcx),%xmm0 
paddd 272-256(%rcx),%xmm1 paddd 288-256(%rcx),%xmm2 paddd 304-256(%rcx),%xmm3 movdqa %xmm0,%xmm8 punpckldq %xmm1,%xmm0 movdqa %xmm2,%xmm7 punpckldq %xmm3,%xmm2 punpckhdq %xmm1,%xmm8 punpckhdq %xmm3,%xmm7 movdqa %xmm0,%xmm1 punpcklqdq %xmm2,%xmm0 movdqa %xmm8,%xmm3 punpcklqdq %xmm7,%xmm8 punpckhqdq %xmm2,%xmm1 punpckhqdq %xmm7,%xmm3 cmpq $256,%rdx jb .Ltail4x movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu 0(%rsi),%xmm6 movdqu %xmm11,80(%rdi) movdqu 16(%rsi),%xmm11 movdqu %xmm2,96(%rdi) movdqu 32(%rsi),%xmm2 movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi movdqu 48(%rsi),%xmm7 pxor 32(%rsp),%xmm6 pxor %xmm10,%xmm11 pxor %xmm14,%xmm2 pxor %xmm8,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 48(%rsp),%xmm6 pxor %xmm15,%xmm11 pxor %xmm9,%xmm2 pxor %xmm3,%xmm7 movdqu %xmm6,64(%rdi) movdqu %xmm11,80(%rdi) movdqu %xmm2,96(%rdi) movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi subq $256,%rdx jnz .Loop_outer4x jmp .Ldone4x .Ltail4x: cmpq $192,%rdx jae .L192_or_more4x cmpq $128,%rdx jae .L128_or_more4x cmpq $64,%rdx jae .L64_or_more4x xorq %r10,%r10 movdqa %xmm12,16(%rsp) movdqa %xmm4,32(%rsp) movdqa %xmm0,48(%rsp) jmp .Loop_tail4x .align 32 .L64_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu %xmm11,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm7,48(%rdi) je .Ldone4x movdqa 16(%rsp),%xmm6 leaq 64(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm13,16(%rsp) leaq 64(%rdi),%rdi movdqa %xmm5,32(%rsp) subq $64,%rdx movdqa %xmm1,48(%rsp) jmp .Loop_tail4x .align 32 .L128_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 pxor 16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu %xmm11,80(%rdi) movdqu %xmm2,96(%rdi) movdqu %xmm7,112(%rdi) je .Ldone4x movdqa 32(%rsp),%xmm6 leaq 128(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm10,16(%rsp) leaq 128(%rdi),%rdi movdqa %xmm14,32(%rsp) subq $128,%rdx movdqa %xmm8,48(%rsp) jmp .Loop_tail4x .align 32 .L192_or_more4x: movdqu 0(%rsi),%xmm6 movdqu 16(%rsi),%xmm11 movdqu 32(%rsi),%xmm2 movdqu 48(%rsi),%xmm7 pxor 0(%rsp),%xmm6 pxor %xmm12,%xmm11 pxor %xmm4,%xmm2 pxor %xmm0,%xmm7 movdqu %xmm6,0(%rdi) movdqu 64(%rsi),%xmm6 movdqu %xmm11,16(%rdi) movdqu 80(%rsi),%xmm11 movdqu %xmm2,32(%rdi) movdqu 96(%rsi),%xmm2 movdqu %xmm7,48(%rdi) movdqu 112(%rsi),%xmm7 leaq 128(%rsi),%rsi pxor 16(%rsp),%xmm6 pxor %xmm13,%xmm11 pxor %xmm5,%xmm2 pxor %xmm1,%xmm7 movdqu %xmm6,64(%rdi) movdqu 0(%rsi),%xmm6 movdqu %xmm11,80(%rdi) movdqu 16(%rsi),%xmm11 movdqu %xmm2,96(%rdi) movdqu 32(%rsi),%xmm2 movdqu %xmm7,112(%rdi) leaq 128(%rdi),%rdi 
movdqu 48(%rsi),%xmm7 pxor 32(%rsp),%xmm6 pxor %xmm10,%xmm11 pxor %xmm14,%xmm2 pxor %xmm8,%xmm7 movdqu %xmm6,0(%rdi) movdqu %xmm11,16(%rdi) movdqu %xmm2,32(%rdi) movdqu %xmm7,48(%rdi) je .Ldone4x movdqa 48(%rsp),%xmm6 leaq 64(%rsi),%rsi xorq %r10,%r10 movdqa %xmm6,0(%rsp) movdqa %xmm15,16(%rsp) leaq 64(%rdi),%rdi movdqa %xmm9,32(%rsp) subq $192,%rdx movdqa %xmm3,48(%rsp) .Loop_tail4x: movzbl (%rsi,%r10,1),%eax movzbl (%rsp,%r10,1),%ecx leaq 1(%r10),%r10 xorl %ecx,%eax movb %al,-1(%rdi,%r10,1) decq %rdx jnz .Loop_tail4x .Ldone4x: leaq (%r9),%rsp .cfi_def_cfa_register rsp .L4x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ChaCha20_ctr32_ssse3_4x,.-ChaCha20_ctr32_ssse3_4x .globl ChaCha20_ctr32_avx2 .hidden ChaCha20_ctr32_avx2 .type ChaCha20_ctr32_avx2,@function .align 32 ChaCha20_ctr32_avx2: .cfi_startproc _CET_ENDBR movq %rsp,%r9 .cfi_def_cfa_register r9 subq $0x280+8,%rsp andq $-32,%rsp vzeroupper vbroadcasti128 .Lsigma(%rip),%ymm11 vbroadcasti128 (%rcx),%ymm3 vbroadcasti128 16(%rcx),%ymm15 vbroadcasti128 (%r8),%ymm7 leaq 256(%rsp),%rcx leaq 512(%rsp),%rax leaq .Lrot16(%rip),%r10 leaq .Lrot24(%rip),%r11 vpshufd $0x00,%ymm11,%ymm8 vpshufd $0x55,%ymm11,%ymm9 vmovdqa %ymm8,128-256(%rcx) vpshufd $0xaa,%ymm11,%ymm10 vmovdqa %ymm9,160-256(%rcx) vpshufd $0xff,%ymm11,%ymm11 vmovdqa %ymm10,192-256(%rcx) vmovdqa %ymm11,224-256(%rcx) vpshufd $0x00,%ymm3,%ymm0 vpshufd $0x55,%ymm3,%ymm1 vmovdqa %ymm0,256-256(%rcx) vpshufd $0xaa,%ymm3,%ymm2 vmovdqa %ymm1,288-256(%rcx) vpshufd $0xff,%ymm3,%ymm3 vmovdqa %ymm2,320-256(%rcx) vmovdqa %ymm3,352-256(%rcx) vpshufd $0x00,%ymm15,%ymm12 vpshufd $0x55,%ymm15,%ymm13 vmovdqa %ymm12,384-512(%rax) vpshufd $0xaa,%ymm15,%ymm14 vmovdqa %ymm13,416-512(%rax) vpshufd $0xff,%ymm15,%ymm15 vmovdqa %ymm14,448-512(%rax) vmovdqa %ymm15,480-512(%rax) vpshufd $0x00,%ymm7,%ymm4 vpshufd $0x55,%ymm7,%ymm5 vpaddd .Lincy(%rip),%ymm4,%ymm4 vpshufd $0xaa,%ymm7,%ymm6 vmovdqa %ymm5,544-512(%rax) vpshufd $0xff,%ymm7,%ymm7 vmovdqa %ymm6,576-512(%rax) vmovdqa %ymm7,608-512(%rax) jmp .Loop_enter8x .align 32 .Loop_outer8x: vmovdqa 128-256(%rcx),%ymm8 vmovdqa 160-256(%rcx),%ymm9 vmovdqa 192-256(%rcx),%ymm10 vmovdqa 224-256(%rcx),%ymm11 vmovdqa 256-256(%rcx),%ymm0 vmovdqa 288-256(%rcx),%ymm1 vmovdqa 320-256(%rcx),%ymm2 vmovdqa 352-256(%rcx),%ymm3 vmovdqa 384-512(%rax),%ymm12 vmovdqa 416-512(%rax),%ymm13 vmovdqa 448-512(%rax),%ymm14 vmovdqa 480-512(%rax),%ymm15 vmovdqa 512-512(%rax),%ymm4 vmovdqa 544-512(%rax),%ymm5 vmovdqa 576-512(%rax),%ymm6 vmovdqa 608-512(%rax),%ymm7 vpaddd .Leight(%rip),%ymm4,%ymm4 .Loop_enter8x: vmovdqa %ymm14,64(%rsp) vmovdqa %ymm15,96(%rsp) vbroadcasti128 (%r10),%ymm15 vmovdqa %ymm4,512-512(%rax) movl $10,%eax jmp .Loop8x .align 32 .Loop8x: vpaddd %ymm0,%ymm8,%ymm8 vpxor %ymm4,%ymm8,%ymm4 vpshufb %ymm15,%ymm4,%ymm4 vpaddd %ymm1,%ymm9,%ymm9 vpxor %ymm5,%ymm9,%ymm5 vpshufb %ymm15,%ymm5,%ymm5 vpaddd %ymm4,%ymm12,%ymm12 vpxor %ymm0,%ymm12,%ymm0 vpslld $12,%ymm0,%ymm14 vpsrld $20,%ymm0,%ymm0 vpor %ymm0,%ymm14,%ymm0 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm5,%ymm13,%ymm13 vpxor %ymm1,%ymm13,%ymm1 vpslld $12,%ymm1,%ymm15 vpsrld $20,%ymm1,%ymm1 vpor %ymm1,%ymm15,%ymm1 vpaddd %ymm0,%ymm8,%ymm8 vpxor %ymm4,%ymm8,%ymm4 vpshufb %ymm14,%ymm4,%ymm4 vpaddd %ymm1,%ymm9,%ymm9 vpxor %ymm5,%ymm9,%ymm5 vpshufb %ymm14,%ymm5,%ymm5 vpaddd %ymm4,%ymm12,%ymm12 vpxor %ymm0,%ymm12,%ymm0 vpslld $7,%ymm0,%ymm15 vpsrld $25,%ymm0,%ymm0 vpor %ymm0,%ymm15,%ymm0 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm5,%ymm13,%ymm13 vpxor %ymm1,%ymm13,%ymm1 vpslld $7,%ymm1,%ymm14 vpsrld $25,%ymm1,%ymm1 vpor %ymm1,%ymm14,%ymm1 
vmovdqa %ymm12,0(%rsp) vmovdqa %ymm13,32(%rsp) vmovdqa 64(%rsp),%ymm12 vmovdqa 96(%rsp),%ymm13 vpaddd %ymm2,%ymm10,%ymm10 vpxor %ymm6,%ymm10,%ymm6 vpshufb %ymm15,%ymm6,%ymm6 vpaddd %ymm3,%ymm11,%ymm11 vpxor %ymm7,%ymm11,%ymm7 vpshufb %ymm15,%ymm7,%ymm7 vpaddd %ymm6,%ymm12,%ymm12 vpxor %ymm2,%ymm12,%ymm2 vpslld $12,%ymm2,%ymm14 vpsrld $20,%ymm2,%ymm2 vpor %ymm2,%ymm14,%ymm2 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm7,%ymm13,%ymm13 vpxor %ymm3,%ymm13,%ymm3 vpslld $12,%ymm3,%ymm15 vpsrld $20,%ymm3,%ymm3 vpor %ymm3,%ymm15,%ymm3 vpaddd %ymm2,%ymm10,%ymm10 vpxor %ymm6,%ymm10,%ymm6 vpshufb %ymm14,%ymm6,%ymm6 vpaddd %ymm3,%ymm11,%ymm11 vpxor %ymm7,%ymm11,%ymm7 vpshufb %ymm14,%ymm7,%ymm7 vpaddd %ymm6,%ymm12,%ymm12 vpxor %ymm2,%ymm12,%ymm2 vpslld $7,%ymm2,%ymm15 vpsrld $25,%ymm2,%ymm2 vpor %ymm2,%ymm15,%ymm2 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm7,%ymm13,%ymm13 vpxor %ymm3,%ymm13,%ymm3 vpslld $7,%ymm3,%ymm14 vpsrld $25,%ymm3,%ymm3 vpor %ymm3,%ymm14,%ymm3 vpaddd %ymm1,%ymm8,%ymm8 vpxor %ymm7,%ymm8,%ymm7 vpshufb %ymm15,%ymm7,%ymm7 vpaddd %ymm2,%ymm9,%ymm9 vpxor %ymm4,%ymm9,%ymm4 vpshufb %ymm15,%ymm4,%ymm4 vpaddd %ymm7,%ymm12,%ymm12 vpxor %ymm1,%ymm12,%ymm1 vpslld $12,%ymm1,%ymm14 vpsrld $20,%ymm1,%ymm1 vpor %ymm1,%ymm14,%ymm1 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm4,%ymm13,%ymm13 vpxor %ymm2,%ymm13,%ymm2 vpslld $12,%ymm2,%ymm15 vpsrld $20,%ymm2,%ymm2 vpor %ymm2,%ymm15,%ymm2 vpaddd %ymm1,%ymm8,%ymm8 vpxor %ymm7,%ymm8,%ymm7 vpshufb %ymm14,%ymm7,%ymm7 vpaddd %ymm2,%ymm9,%ymm9 vpxor %ymm4,%ymm9,%ymm4 vpshufb %ymm14,%ymm4,%ymm4 vpaddd %ymm7,%ymm12,%ymm12 vpxor %ymm1,%ymm12,%ymm1 vpslld $7,%ymm1,%ymm15 vpsrld $25,%ymm1,%ymm1 vpor %ymm1,%ymm15,%ymm1 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm4,%ymm13,%ymm13 vpxor %ymm2,%ymm13,%ymm2 vpslld $7,%ymm2,%ymm14 vpsrld $25,%ymm2,%ymm2 vpor %ymm2,%ymm14,%ymm2 vmovdqa %ymm12,64(%rsp) vmovdqa %ymm13,96(%rsp) vmovdqa 0(%rsp),%ymm12 vmovdqa 32(%rsp),%ymm13 vpaddd %ymm3,%ymm10,%ymm10 vpxor %ymm5,%ymm10,%ymm5 vpshufb %ymm15,%ymm5,%ymm5 vpaddd %ymm0,%ymm11,%ymm11 vpxor %ymm6,%ymm11,%ymm6 vpshufb %ymm15,%ymm6,%ymm6 vpaddd %ymm5,%ymm12,%ymm12 vpxor %ymm3,%ymm12,%ymm3 vpslld $12,%ymm3,%ymm14 vpsrld $20,%ymm3,%ymm3 vpor %ymm3,%ymm14,%ymm3 vbroadcasti128 (%r11),%ymm14 vpaddd %ymm6,%ymm13,%ymm13 vpxor %ymm0,%ymm13,%ymm0 vpslld $12,%ymm0,%ymm15 vpsrld $20,%ymm0,%ymm0 vpor %ymm0,%ymm15,%ymm0 vpaddd %ymm3,%ymm10,%ymm10 vpxor %ymm5,%ymm10,%ymm5 vpshufb %ymm14,%ymm5,%ymm5 vpaddd %ymm0,%ymm11,%ymm11 vpxor %ymm6,%ymm11,%ymm6 vpshufb %ymm14,%ymm6,%ymm6 vpaddd %ymm5,%ymm12,%ymm12 vpxor %ymm3,%ymm12,%ymm3 vpslld $7,%ymm3,%ymm15 vpsrld $25,%ymm3,%ymm3 vpor %ymm3,%ymm15,%ymm3 vbroadcasti128 (%r10),%ymm15 vpaddd %ymm6,%ymm13,%ymm13 vpxor %ymm0,%ymm13,%ymm0 vpslld $7,%ymm0,%ymm14 vpsrld $25,%ymm0,%ymm0 vpor %ymm0,%ymm14,%ymm0 decl %eax jnz .Loop8x leaq 512(%rsp),%rax vpaddd 128-256(%rcx),%ymm8,%ymm8 vpaddd 160-256(%rcx),%ymm9,%ymm9 vpaddd 192-256(%rcx),%ymm10,%ymm10 vpaddd 224-256(%rcx),%ymm11,%ymm11 vpunpckldq %ymm9,%ymm8,%ymm14 vpunpckldq %ymm11,%ymm10,%ymm15 vpunpckhdq %ymm9,%ymm8,%ymm8 vpunpckhdq %ymm11,%ymm10,%ymm10 vpunpcklqdq %ymm15,%ymm14,%ymm9 vpunpckhqdq %ymm15,%ymm14,%ymm14 vpunpcklqdq %ymm10,%ymm8,%ymm11 vpunpckhqdq %ymm10,%ymm8,%ymm8 vpaddd 256-256(%rcx),%ymm0,%ymm0 vpaddd 288-256(%rcx),%ymm1,%ymm1 vpaddd 320-256(%rcx),%ymm2,%ymm2 vpaddd 352-256(%rcx),%ymm3,%ymm3 vpunpckldq %ymm1,%ymm0,%ymm10 vpunpckldq %ymm3,%ymm2,%ymm15 vpunpckhdq %ymm1,%ymm0,%ymm0 vpunpckhdq %ymm3,%ymm2,%ymm2 vpunpcklqdq %ymm15,%ymm10,%ymm1 vpunpckhqdq %ymm15,%ymm10,%ymm10 vpunpcklqdq %ymm2,%ymm0,%ymm3 
vpunpckhqdq %ymm2,%ymm0,%ymm0 vperm2i128 $0x20,%ymm1,%ymm9,%ymm15 vperm2i128 $0x31,%ymm1,%ymm9,%ymm1 vperm2i128 $0x20,%ymm10,%ymm14,%ymm9 vperm2i128 $0x31,%ymm10,%ymm14,%ymm10 vperm2i128 $0x20,%ymm3,%ymm11,%ymm14 vperm2i128 $0x31,%ymm3,%ymm11,%ymm3 vperm2i128 $0x20,%ymm0,%ymm8,%ymm11 vperm2i128 $0x31,%ymm0,%ymm8,%ymm0 vmovdqa %ymm15,0(%rsp) vmovdqa %ymm9,32(%rsp) vmovdqa 64(%rsp),%ymm15 vmovdqa 96(%rsp),%ymm9 vpaddd 384-512(%rax),%ymm12,%ymm12 vpaddd 416-512(%rax),%ymm13,%ymm13 vpaddd 448-512(%rax),%ymm15,%ymm15 vpaddd 480-512(%rax),%ymm9,%ymm9 vpunpckldq %ymm13,%ymm12,%ymm2 vpunpckldq %ymm9,%ymm15,%ymm8 vpunpckhdq %ymm13,%ymm12,%ymm12 vpunpckhdq %ymm9,%ymm15,%ymm15 vpunpcklqdq %ymm8,%ymm2,%ymm13 vpunpckhqdq %ymm8,%ymm2,%ymm2 vpunpcklqdq %ymm15,%ymm12,%ymm9 vpunpckhqdq %ymm15,%ymm12,%ymm12 vpaddd 512-512(%rax),%ymm4,%ymm4 vpaddd 544-512(%rax),%ymm5,%ymm5 vpaddd 576-512(%rax),%ymm6,%ymm6 vpaddd 608-512(%rax),%ymm7,%ymm7 vpunpckldq %ymm5,%ymm4,%ymm15 vpunpckldq %ymm7,%ymm6,%ymm8 vpunpckhdq %ymm5,%ymm4,%ymm4 vpunpckhdq %ymm7,%ymm6,%ymm6 vpunpcklqdq %ymm8,%ymm15,%ymm5 vpunpckhqdq %ymm8,%ymm15,%ymm15 vpunpcklqdq %ymm6,%ymm4,%ymm7 vpunpckhqdq %ymm6,%ymm4,%ymm4 vperm2i128 $0x20,%ymm5,%ymm13,%ymm8 vperm2i128 $0x31,%ymm5,%ymm13,%ymm5 vperm2i128 $0x20,%ymm15,%ymm2,%ymm13 vperm2i128 $0x31,%ymm15,%ymm2,%ymm15 vperm2i128 $0x20,%ymm7,%ymm9,%ymm2 vperm2i128 $0x31,%ymm7,%ymm9,%ymm7 vperm2i128 $0x20,%ymm4,%ymm12,%ymm9 vperm2i128 $0x31,%ymm4,%ymm12,%ymm4 vmovdqa 0(%rsp),%ymm6 vmovdqa 32(%rsp),%ymm12 cmpq $512,%rdx jb .Ltail8x vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 leaq 128(%rsi),%rsi vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm12,%ymm12 vpxor 32(%rsi),%ymm13,%ymm13 vpxor 64(%rsi),%ymm10,%ymm10 vpxor 96(%rsi),%ymm15,%ymm15 leaq 128(%rsi),%rsi vmovdqu %ymm12,0(%rdi) vmovdqu %ymm13,32(%rdi) vmovdqu %ymm10,64(%rdi) vmovdqu %ymm15,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm14,%ymm14 vpxor 32(%rsi),%ymm2,%ymm2 vpxor 64(%rsi),%ymm3,%ymm3 vpxor 96(%rsi),%ymm7,%ymm7 leaq 128(%rsi),%rsi vmovdqu %ymm14,0(%rdi) vmovdqu %ymm2,32(%rdi) vmovdqu %ymm3,64(%rdi) vmovdqu %ymm7,96(%rdi) leaq 128(%rdi),%rdi vpxor 0(%rsi),%ymm11,%ymm11 vpxor 32(%rsi),%ymm9,%ymm9 vpxor 64(%rsi),%ymm0,%ymm0 vpxor 96(%rsi),%ymm4,%ymm4 leaq 128(%rsi),%rsi vmovdqu %ymm11,0(%rdi) vmovdqu %ymm9,32(%rdi) vmovdqu %ymm0,64(%rdi) vmovdqu %ymm4,96(%rdi) leaq 128(%rdi),%rdi subq $512,%rdx jnz .Loop_outer8x jmp .Ldone8x .Ltail8x: cmpq $448,%rdx jae .L448_or_more8x cmpq $384,%rdx jae .L384_or_more8x cmpq $320,%rdx jae .L320_or_more8x cmpq $256,%rdx jae .L256_or_more8x cmpq $192,%rdx jae .L192_or_more8x cmpq $128,%rdx jae .L128_or_more8x cmpq $64,%rdx jae .L64_or_more8x xorq %r10,%r10 vmovdqa %ymm6,0(%rsp) vmovdqa %ymm8,32(%rsp) jmp .Loop_tail8x .align 32 .L64_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) je .Ldone8x leaq 64(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm1,0(%rsp) leaq 64(%rdi),%rdi subq $64,%rdx vmovdqa %ymm5,32(%rsp) jmp .Loop_tail8x .align 32 .L128_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) je .Ldone8x leaq 128(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm12,0(%rsp) leaq 128(%rdi),%rdi subq $128,%rdx vmovdqa %ymm13,32(%rsp) jmp .Loop_tail8x .align 32 .L192_or_more8x: vpxor 
0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) je .Ldone8x leaq 192(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm10,0(%rsp) leaq 192(%rdi),%rdi subq $192,%rdx vmovdqa %ymm15,32(%rsp) jmp .Loop_tail8x .align 32 .L256_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) je .Ldone8x leaq 256(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm14,0(%rsp) leaq 256(%rdi),%rdi subq $256,%rdx vmovdqa %ymm2,32(%rsp) jmp .Loop_tail8x .align 32 .L320_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) je .Ldone8x leaq 320(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm3,0(%rsp) leaq 320(%rdi),%rdi subq $320,%rdx vmovdqa %ymm7,32(%rsp) jmp .Loop_tail8x .align 32 .L384_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vpxor 320(%rsi),%ymm3,%ymm3 vpxor 352(%rsi),%ymm7,%ymm7 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) vmovdqu %ymm3,320(%rdi) vmovdqu %ymm7,352(%rdi) je .Ldone8x leaq 384(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm11,0(%rsp) leaq 384(%rdi),%rdi subq $384,%rdx vmovdqa %ymm9,32(%rsp) jmp .Loop_tail8x .align 32 .L448_or_more8x: vpxor 0(%rsi),%ymm6,%ymm6 vpxor 32(%rsi),%ymm8,%ymm8 vpxor 64(%rsi),%ymm1,%ymm1 vpxor 96(%rsi),%ymm5,%ymm5 vpxor 128(%rsi),%ymm12,%ymm12 vpxor 160(%rsi),%ymm13,%ymm13 vpxor 192(%rsi),%ymm10,%ymm10 vpxor 224(%rsi),%ymm15,%ymm15 vpxor 256(%rsi),%ymm14,%ymm14 vpxor 288(%rsi),%ymm2,%ymm2 vpxor 320(%rsi),%ymm3,%ymm3 vpxor 352(%rsi),%ymm7,%ymm7 vpxor 384(%rsi),%ymm11,%ymm11 vpxor 416(%rsi),%ymm9,%ymm9 vmovdqu %ymm6,0(%rdi) vmovdqu %ymm8,32(%rdi) vmovdqu %ymm1,64(%rdi) vmovdqu %ymm5,96(%rdi) vmovdqu %ymm12,128(%rdi) vmovdqu %ymm13,160(%rdi) vmovdqu %ymm10,192(%rdi) vmovdqu %ymm15,224(%rdi) vmovdqu %ymm14,256(%rdi) vmovdqu %ymm2,288(%rdi) vmovdqu %ymm3,320(%rdi) vmovdqu %ymm7,352(%rdi) vmovdqu %ymm11,384(%rdi) vmovdqu %ymm9,416(%rdi) je .Ldone8x leaq 448(%rsi),%rsi xorq %r10,%r10 vmovdqa %ymm0,0(%rsp) leaq 448(%rdi),%rdi subq $448,%rdx vmovdqa %ymm4,32(%rsp) .Loop_tail8x: movzbl (%rsi,%r10,1),%eax movzbl (%rsp,%r10,1),%ecx leaq 1(%r10),%r10 xorl %ecx,%eax movb %al,-1(%rdi,%r10,1) decq 
%rdx jnz .Loop_tail8x .Ldone8x: vzeroall leaq (%r9),%rsp .cfi_def_cfa_register rsp .L8x_epilogue: .byte 0xf3,0xc3 .cfi_endproc .size ChaCha20_ctr32_avx2,.-ChaCha20_ctr32_avx2 #endif
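For orientation, the ChaCha20_ctr32_nohw/_ssse3/_ssse3_4x/_avx2 kernels above all compute the same primitive: the RFC 8439 ChaCha20 quarter round (add, xor, rotate by 16, 12, 8, 7), applied as 10 double rounds per 64-byte block (the "$10" round-loop counters visible in each path) and interleaved across 1, 4, or 8 blocks in the SSSE3 and AVX2 paths. A minimal scalar sketch in Rust, assuming the RFC 8439 state layout; the helper names below are illustrative, not symbols exported by aws-lc:

// Scalar reference for the quarter round that the SIMD code above parallelizes (RFC 8439).
#[inline(always)]
fn quarter_round(s: &mut [u32; 16], a: usize, b: usize, c: usize, d: usize) {
    s[a] = s[a].wrapping_add(s[b]); s[d] = (s[d] ^ s[a]).rotate_left(16);
    s[c] = s[c].wrapping_add(s[d]); s[b] = (s[b] ^ s[c]).rotate_left(12);
    s[a] = s[a].wrapping_add(s[b]); s[d] = (s[d] ^ s[a]).rotate_left(8);
    s[c] = s[c].wrapping_add(s[d]); s[b] = (s[b] ^ s[c]).rotate_left(7);
}

// One double round: four column quarter rounds, then four diagonal ones.
// The assembly above runs this 10 times per block (20 rounds total).
fn double_round(s: &mut [u32; 16]) {
    quarter_round(s, 0, 4, 8, 12);
    quarter_round(s, 1, 5, 9, 13);
    quarter_round(s, 2, 6, 10, 14);
    quarter_round(s, 3, 7, 11, 15);
    quarter_round(s, 0, 5, 10, 15);
    quarter_round(s, 1, 6, 11, 12);
    quarter_round(s, 2, 7, 8, 13);
    quarter_round(s, 3, 4, 9, 14);
}

After the 10 double rounds, the original state words are added back and the resulting 64-byte keystream block is XORed into the input, which corresponds to the paddd/pxor (or vpaddd/vpxor) tails of each code path above.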
marvin-hansen/iggy-streaming-system
192,912
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P chacha20_poly1305_constants: .section .rodata .align 64 .Lchacha20_consts: .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' .byte 'e','x','p','a','n','d',' ','3','2','-','b','y','t','e',' ','k' .Lrol8: .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 .byte 3,0,1,2, 7,4,5,6, 11,8,9,10, 15,12,13,14 .Lrol16: .byte 2,3,0,1, 6,7,4,5, 10,11,8,9, 14,15,12,13 .byte 2,3,0,1, 6,7,4,5, 10,11,8,9, 14,15,12,13 .Lavx2_init: .long 0,0,0,0 .Lsse_inc: .long 1,0,0,0 .Lavx2_inc: .long 2,0,0,0,2,0,0,0 .Lclamp: .quad 0x0FFFFFFC0FFFFFFF, 0x0FFFFFFC0FFFFFFC .quad 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF .align 16 .Land_masks: .byte 0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00 .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff .text .type poly_hash_ad_internal,@function .align 64 poly_hash_ad_internal: .cfi_startproc .cfi_def_cfa rsp, 8 xorq %r10,%r10 xorq %r11,%r11 xorq %r12,%r12 cmpq $13,%r8 jne .Lhash_ad_loop .Lpoly_fast_tls_ad: movq (%rcx),%r10 movq 5(%rcx),%r11 shrq $24,%r11 movq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .byte 0xf3,0xc3 .Lhash_ad_loop: cmpq $16,%r8 jb .Lhash_ad_tail addq 0+0(%rcx),%r10 adcq 8+0(%rcx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 
adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rcx),%rcx subq $16,%r8 jmp .Lhash_ad_loop .Lhash_ad_tail: cmpq $0,%r8 je .Lhash_ad_done xorq %r13,%r13 xorq %r14,%r14 xorq %r15,%r15 addq %r8,%rcx .Lhash_ad_tail_loop: shldq $8,%r13,%r14 shlq $8,%r13 movzbq -1(%rcx),%r15 xorq %r15,%r13 decq %rcx decq %r8 jne .Lhash_ad_tail_loop addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lhash_ad_done: .byte 0xf3,0xc3 .cfi_endproc .size poly_hash_ad_internal, .-poly_hash_ad_internal .globl chacha20_poly1305_open .hidden chacha20_poly1305_open .type chacha20_poly1305_open,@function .align 64 chacha20_poly1305_open: .cfi_startproc _CET_ENDBR pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 pushq %r9 .cfi_adjust_cfa_offset 8 .cfi_offset %r9,-64 subq $288 + 0 + 32,%rsp .cfi_adjust_cfa_offset 288 + 32 leaq 32(%rsp),%rbp andq $-32,%rbp movq %rdx,%rbx movq %r8,0+0+32(%rbp) movq %rbx,8+0+32(%rbp) movl OPENSSL_ia32cap_P+8(%rip),%eax andl $288,%eax xorl $288,%eax jz chacha20_poly1305_open_avx2 cmpq $128,%rbx jbe .Lopen_sse_128 movdqa .Lchacha20_consts(%rip),%xmm0 movdqu 0(%r9),%xmm4 movdqu 16(%r9),%xmm8 movdqu 32(%r9),%xmm12 movdqa %xmm12,%xmm7 movdqa %xmm4,0+48(%rbp) movdqa %xmm8,0+64(%rbp) movdqa %xmm12,0+96(%rbp) movq $10,%r10 .Lopen_sse_init_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %r10 jne .Lopen_sse_init_rounds paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 pand .Lclamp(%rip),%xmm0 movdqa %xmm0,0+0(%rbp) movdqa %xmm4,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal .Lopen_sse_main_loop: cmpq $256,%rbx jb .Lopen_sse_tail movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa 
%xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd .Lsse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) movq $4,%rcx movq %rsi,%r8 .Lopen_sse_main_loop_rounds: movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 leaq 16(%r8),%r8 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor 
%xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %rcx jge .Lopen_sse_main_loop_rounds addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 cmpq $-6,%rcx jg .Lopen_sse_main_loop_rounds paddd .Lchacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa %xmm12,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm12 pxor %xmm3,%xmm12 movdqu %xmm12,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm12 pxor %xmm7,%xmm12 movdqu %xmm12,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm12 pxor %xmm11,%xmm12 movdqu %xmm12,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm12 pxor %xmm15,%xmm12 movdqu %xmm12,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movdqu 0 + 192(%rsi),%xmm3 movdqu 16 + 192(%rsi),%xmm7 movdqu 32 + 192(%rsi),%xmm11 movdqu 48 + 192(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor 0+80(%rbp),%xmm15 movdqu %xmm0,0 + 192(%rdi) movdqu %xmm4,16 + 192(%rdi) movdqu %xmm8,32 + 
192(%rdi) movdqu %xmm15,48 + 192(%rdi) leaq 256(%rsi),%rsi leaq 256(%rdi),%rdi subq $256,%rbx jmp .Lopen_sse_main_loop .Lopen_sse_tail: testq %rbx,%rbx jz .Lopen_sse_finalize cmpq $192,%rbx ja .Lopen_sse_tail_256 cmpq $128,%rbx ja .Lopen_sse_tail_192 cmpq $64,%rbx ja .Lopen_sse_tail_128 movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa 0+96(%rbp),%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) xorq %r8,%r8 movq %rbx,%rcx cmpq $16,%rcx jb .Lopen_sse_tail_64_rounds .Lopen_sse_tail_64_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx .Lopen_sse_tail_64_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 cmpq $16,%rcx jae .Lopen_sse_tail_64_rounds_and_x1hash cmpq $160,%r8 jne .Lopen_sse_tail_64_rounds paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 jmp .Lopen_sse_tail_64_dec_loop .Lopen_sse_tail_128: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa 0+96(%rbp),%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movq %rbx,%rcx andq $-16,%rcx xorq %r8,%r8 .Lopen_sse_tail_128_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lopen_sse_tail_128_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor 
%xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 cmpq %rcx,%r8 jb .Lopen_sse_tail_128_rounds_and_x1hash cmpq $160,%r8 jne .Lopen_sse_tail_128_rounds paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 0(%rdi) movdqu %xmm5,16 + 0(%rdi) movdqu %xmm9,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) subq $64,%rbx leaq 64(%rsi),%rsi leaq 64(%rdi),%rdi jmp .Lopen_sse_tail_64_dec_loop .Lopen_sse_tail_192: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa 0+96(%rbp),%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movq %rbx,%rcx movq $160,%r8 cmpq $160,%rcx cmovgq %r8,%rcx andq $-16,%rcx xorq %r8,%r8 .Lopen_sse_tail_192_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lopen_sse_tail_192_rounds: addq $16,%r8 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 
paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 cmpq %rcx,%r8 jb .Lopen_sse_tail_192_rounds_and_x1hash cmpq $160,%r8 jne .Lopen_sse_tail_192_rounds cmpq $176,%rbx jb .Lopen_sse_tail_192_finish addq 0+160(%rsi),%r10 adcq 8+160(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 cmpq $192,%rbx jb .Lopen_sse_tail_192_finish addq 0+176(%rsi),%r10 adcq 8+176(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 
movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lopen_sse_tail_192_finish: paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu %xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) subq $128,%rbx leaq 128(%rsi),%rsi leaq 128(%rdi),%rdi jmp .Lopen_sse_tail_64_dec_loop .Lopen_sse_tail_256: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd .Lsse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) xorq %r8,%r8 .Lopen_sse_tail_256_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movdqa %xmm11,0+80(%rbp) paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $12,%xmm11 psrld $20,%xmm4 pxor %xmm11,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $7,%xmm11 psrld $25,%xmm4 pxor %xmm11,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $12,%xmm11 psrld $20,%xmm5 pxor %xmm11,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $7,%xmm11 psrld $25,%xmm5 pxor %xmm11,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $12,%xmm11 psrld $20,%xmm6 pxor %xmm11,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $7,%xmm11 psrld $25,%xmm6 pxor %xmm11,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 movdqa 0+80(%rbp),%xmm11 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa %xmm9,0+80(%rbp) paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb .Lrol16(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $12,%xmm9 psrld $20,%xmm7 pxor 
%xmm9,%xmm7 paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb .Lrol8(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $7,%xmm9 psrld $25,%xmm7 pxor %xmm9,%xmm7 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 movdqa 0+80(%rbp),%xmm9 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx movdqa %xmm11,0+80(%rbp) paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $12,%xmm11 psrld $20,%xmm4 pxor %xmm11,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm11 pslld $7,%xmm11 psrld $25,%xmm4 pxor %xmm11,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $12,%xmm11 psrld $20,%xmm5 pxor %xmm11,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm11 pslld $7,%xmm11 psrld $25,%xmm5 pxor %xmm11,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $12,%xmm11 psrld $20,%xmm6 pxor %xmm11,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm11 pslld $7,%xmm11 psrld $25,%xmm6 pxor %xmm11,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 movdqa 0+80(%rbp),%xmm11 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm9,0+80(%rbp) paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb .Lrol16(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $12,%xmm9 psrld $20,%xmm7 pxor %xmm9,%xmm7 paddd %xmm7,%xmm3 pxor %xmm3,%xmm15 pshufb .Lrol8(%rip),%xmm15 paddd %xmm15,%xmm11 pxor %xmm11,%xmm7 movdqa %xmm7,%xmm9 pslld $7,%xmm9 psrld $25,%xmm7 pxor %xmm9,%xmm7 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 movdqa 0+80(%rbp),%xmm9 addq $16,%r8 cmpq $160,%r8 jb .Lopen_sse_tail_256_rounds_and_x1hash movq %rbx,%rcx andq $-16,%rcx .Lopen_sse_tail_256_hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq $16,%r8 cmpq %rcx,%r8 jb .Lopen_sse_tail_256_hash paddd .Lchacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 
0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa %xmm12,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm12 pxor %xmm3,%xmm12 movdqu %xmm12,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm12 pxor %xmm7,%xmm12 movdqu %xmm12,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm12 pxor %xmm11,%xmm12 movdqu %xmm12,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm12 pxor %xmm15,%xmm12 movdqu %xmm12,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movdqa 0+80(%rbp),%xmm12 subq $192,%rbx leaq 192(%rsi),%rsi leaq 192(%rdi),%rdi .Lopen_sse_tail_64_dec_loop: cmpq $16,%rbx jb .Lopen_sse_tail_16_init subq $16,%rbx movdqu (%rsi),%xmm3 pxor %xmm3,%xmm0 movdqu %xmm0,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movdqa %xmm4,%xmm0 movdqa %xmm8,%xmm4 movdqa %xmm12,%xmm8 jmp .Lopen_sse_tail_64_dec_loop .Lopen_sse_tail_16_init: movdqa %xmm0,%xmm1 .Lopen_sse_tail_16: testq %rbx,%rbx jz .Lopen_sse_finalize pxor %xmm3,%xmm3 leaq -1(%rsi,%rbx,1),%rsi movq %rbx,%r8 .Lopen_sse_tail_16_compose: pslldq $1,%xmm3 pinsrb $0,(%rsi),%xmm3 subq $1,%rsi subq $1,%r8 jnz .Lopen_sse_tail_16_compose .byte 102,73,15,126,221 pextrq $1,%xmm3,%r14 pxor %xmm1,%xmm3 .Lopen_sse_tail_16_extract: pextrb $0,%xmm3,(%rdi) psrldq $1,%xmm3 addq $1,%rdi subq $1,%rbx jne .Lopen_sse_tail_16_extract addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lopen_sse_finalize: addq 0+0+32(%rbp),%r10 adcq 8+0+32(%rbp),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movq %r10,%r13 movq %r11,%r14 movq %r12,%r15 subq $-5,%r10 sbbq $-1,%r11 sbbq $3,%r12 cmovcq %r13,%r10 cmovcq %r14,%r11 cmovcq %r15,%r12 addq 0+0+16(%rbp),%r10 adcq 8+0+16(%rbp),%r11 .cfi_remember_state addq $288 + 0 + 32,%rsp .cfi_adjust_cfa_offset -(288 + 32) popq %r9 .cfi_adjust_cfa_offset -8 .cfi_restore %r9 movq %r10,(%r9) movq %r11,8(%r9) popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 
.cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp .byte 0xf3,0xc3 .Lopen_sse_128: .cfi_restore_state movdqu .Lchacha20_consts(%rip),%xmm0 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqu 0(%r9),%xmm4 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqu 16(%r9),%xmm8 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqu 32(%r9),%xmm12 movdqa %xmm12,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa %xmm13,%xmm15 movq $10,%r10 .Lopen_sse_128_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 decq %r10 jnz .Lopen_sse_128_rounds paddd .Lchacha20_consts(%rip),%xmm0 paddd .Lchacha20_consts(%rip),%xmm1 paddd .Lchacha20_consts(%rip),%xmm2 paddd %xmm7,%xmm4 paddd %xmm7,%xmm5 paddd %xmm7,%xmm6 paddd %xmm11,%xmm9 paddd %xmm11,%xmm10 paddd %xmm15,%xmm13 paddd .Lsse_inc(%rip),%xmm15 paddd %xmm15,%xmm14 pand .Lclamp(%rip),%xmm0 movdqa %xmm0,0+0(%rbp) movdqa %xmm4,0+16(%rbp) 
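# Editor's annotation (not emitted by the generator; added only as a reading aid):
# at this point the short-input .Lopen_sse_128 path has run its ten ChaCha20
# double-rounds, added the initial state back in, and derived the 32-byte Poly1305
# key (first keystream block, r clamped via .Lclamp) into 0(%rbp)/16(%rbp).
# The code that follows hashes the AAD through poly_hash_ad_internal and then, in
# .Lopen_sse_128_xor_hash, absorbs each 16-byte ciphertext block into the Poly1305
# accumulator before xoring it with keystream to produce plaintext.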
movq %r8,%r8 call poly_hash_ad_internal .Lopen_sse_128_xor_hash: cmpq $16,%rbx jb .Lopen_sse_tail_16 subq $16,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movdqu 0(%rsi),%xmm3 pxor %xmm3,%xmm1 movdqu %xmm1,0(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm5,%xmm1 movdqa %xmm9,%xmm5 movdqa %xmm13,%xmm9 movdqa %xmm2,%xmm13 movdqa %xmm6,%xmm2 movdqa %xmm10,%xmm6 movdqa %xmm14,%xmm10 jmp .Lopen_sse_128_xor_hash .size chacha20_poly1305_open, .-chacha20_poly1305_open .cfi_endproc .globl chacha20_poly1305_seal .hidden chacha20_poly1305_seal .type chacha20_poly1305_seal,@function .align 64 chacha20_poly1305_seal: .cfi_startproc _CET_ENDBR pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 pushq %r9 .cfi_adjust_cfa_offset 8 .cfi_offset %r9,-64 subq $288 + 0 + 32,%rsp .cfi_adjust_cfa_offset 288 + 32 leaq 32(%rsp),%rbp andq $-32,%rbp movq 56(%r9),%rbx addq %rdx,%rbx movq %r8,0+0+32(%rbp) movq %rbx,8+0+32(%rbp) movq %rdx,%rbx movl OPENSSL_ia32cap_P+8(%rip),%eax andl $288,%eax xorl $288,%eax jz chacha20_poly1305_seal_avx2 cmpq $128,%rbx jbe .Lseal_sse_128 movdqa .Lchacha20_consts(%rip),%xmm0 movdqu 0(%r9),%xmm4 movdqu 16(%r9),%xmm8 movdqu 32(%r9),%xmm12 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqa %xmm8,%xmm11 movdqa %xmm12,%xmm15 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,%xmm14 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,%xmm13 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm4,0+48(%rbp) movdqa %xmm8,0+64(%rbp) movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) movq $10,%r10 .Lseal_sse_init_rounds: movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 
.byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 decq %r10 jnz .Lseal_sse_init_rounds paddd .Lchacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 pand .Lclamp(%rip),%xmm3 movdqa %xmm3,0+0(%rbp) movdqa %xmm7,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu 
%xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) cmpq $192,%rbx ja .Lseal_sse_main_init movq $128,%rcx subq $128,%rbx leaq 128(%rsi),%rsi jmp .Lseal_sse_128_tail_hash .Lseal_sse_main_init: movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor %xmm12,%xmm15 movdqu %xmm0,0 + 128(%rdi) movdqu %xmm4,16 + 128(%rdi) movdqu %xmm8,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) movq $192,%rcx subq $192,%rbx leaq 192(%rsi),%rsi movq $2,%rcx movq $8,%r8 cmpq $64,%rbx jbe .Lseal_sse_tail_64 cmpq $128,%rbx jbe .Lseal_sse_tail_128 cmpq $192,%rbx jbe .Lseal_sse_tail_192 .Lseal_sse_main_loop: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa %xmm0,%xmm3 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa 0+96(%rbp),%xmm15 paddd .Lsse_inc(%rip),%xmm15 movdqa %xmm15,%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) movdqa %xmm15,0+144(%rbp) .align 32 .Lseal_sse_main_rounds: movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 .byte 
102,15,58,15,255,4 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,12 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 movdqa %xmm8,0+80(%rbp) movdqa .Lrol16(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $20,%xmm8 pslld $32-20,%xmm4 pxor %xmm8,%xmm4 movdqa .Lrol8(%rip),%xmm8 paddd %xmm7,%xmm3 paddd %xmm6,%xmm2 paddd %xmm5,%xmm1 paddd %xmm4,%xmm0 pxor %xmm3,%xmm15 pxor %xmm2,%xmm14 pxor %xmm1,%xmm13 pxor %xmm0,%xmm12 .byte 102,69,15,56,0,248 .byte 102,69,15,56,0,240 .byte 102,69,15,56,0,232 .byte 102,69,15,56,0,224 movdqa 0+80(%rbp),%xmm8 paddd %xmm15,%xmm11 paddd %xmm14,%xmm10 paddd %xmm13,%xmm9 paddd %xmm12,%xmm8 pxor %xmm11,%xmm7 pxor %xmm10,%xmm6 pxor %xmm9,%xmm5 pxor %xmm8,%xmm4 movdqa %xmm8,0+80(%rbp) movdqa %xmm7,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm7 pxor %xmm8,%xmm7 movdqa %xmm6,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm6 pxor %xmm8,%xmm6 movdqa %xmm5,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm5 pxor %xmm8,%xmm5 movdqa %xmm4,%xmm8 psrld $25,%xmm8 pslld $32-25,%xmm4 pxor %xmm8,%xmm4 movdqa 0+80(%rbp),%xmm8 .byte 102,15,58,15,255,12 .byte 102,69,15,58,15,219,8 .byte 102,69,15,58,15,255,4 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 leaq 16(%rdi),%rdi decq %r8 jge .Lseal_sse_main_rounds addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi decq %rcx jg .Lseal_sse_main_rounds paddd .Lchacha20_consts(%rip),%xmm3 paddd 0+48(%rbp),%xmm7 paddd 0+64(%rbp),%xmm11 paddd 0+144(%rbp),%xmm15 paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqa 
%xmm14,0+80(%rbp) movdqa %xmm14,0+80(%rbp) movdqu 0 + 0(%rsi),%xmm14 pxor %xmm3,%xmm14 movdqu %xmm14,0 + 0(%rdi) movdqu 16 + 0(%rsi),%xmm14 pxor %xmm7,%xmm14 movdqu %xmm14,16 + 0(%rdi) movdqu 32 + 0(%rsi),%xmm14 pxor %xmm11,%xmm14 movdqu %xmm14,32 + 0(%rdi) movdqu 48 + 0(%rsi),%xmm14 pxor %xmm15,%xmm14 movdqu %xmm14,48 + 0(%rdi) movdqa 0+80(%rbp),%xmm14 movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 64(%rdi) movdqu %xmm6,16 + 64(%rdi) movdqu %xmm10,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movdqu 0 + 128(%rsi),%xmm3 movdqu 16 + 128(%rsi),%xmm7 movdqu 32 + 128(%rsi),%xmm11 movdqu 48 + 128(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 128(%rdi) movdqu %xmm5,16 + 128(%rdi) movdqu %xmm9,32 + 128(%rdi) movdqu %xmm15,48 + 128(%rdi) cmpq $256,%rbx ja .Lseal_sse_main_loop_xor movq $192,%rcx subq $192,%rbx leaq 192(%rsi),%rsi jmp .Lseal_sse_128_tail_hash .Lseal_sse_main_loop_xor: movdqu 0 + 192(%rsi),%xmm3 movdqu 16 + 192(%rsi),%xmm7 movdqu 32 + 192(%rsi),%xmm11 movdqu 48 + 192(%rsi),%xmm15 pxor %xmm3,%xmm0 pxor %xmm7,%xmm4 pxor %xmm11,%xmm8 pxor %xmm12,%xmm15 movdqu %xmm0,0 + 192(%rdi) movdqu %xmm4,16 + 192(%rdi) movdqu %xmm8,32 + 192(%rdi) movdqu %xmm15,48 + 192(%rdi) leaq 256(%rsi),%rsi subq $256,%rbx movq $6,%rcx movq $4,%r8 cmpq $192,%rbx jg .Lseal_sse_main_loop movq %rbx,%rcx testq %rbx,%rbx je .Lseal_sse_128_tail_hash movq $6,%rcx cmpq $128,%rbx ja .Lseal_sse_tail_192 cmpq $64,%rbx ja .Lseal_sse_tail_128 .Lseal_sse_tail_64: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa 0+96(%rbp),%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) .Lseal_sse_tail_64_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_sse_tail_64_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 
movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi decq %rcx jg .Lseal_sse_tail_64_rounds_and_x2hash decq %r8 jge .Lseal_sse_tail_64_rounds_and_x1hash paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 jmp .Lseal_sse_128_tail_xor .Lseal_sse_tail_128: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa 0+96(%rbp),%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) .Lseal_sse_tail_128_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_sse_tail_128_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 
pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 leaq 16(%rdi),%rdi decq %rcx jg .Lseal_sse_tail_128_rounds_and_x2hash decq %r8 jge .Lseal_sse_tail_128_rounds_and_x1hash paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 0(%rdi) movdqu %xmm5,16 + 0(%rdi) movdqu %xmm9,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movq $64,%rcx subq $64,%rbx leaq 64(%rsi),%rsi jmp .Lseal_sse_128_tail_hash .Lseal_sse_tail_192: movdqa .Lchacha20_consts(%rip),%xmm0 movdqa 0+48(%rbp),%xmm4 movdqa 0+64(%rbp),%xmm8 movdqa %xmm0,%xmm1 movdqa %xmm4,%xmm5 movdqa %xmm8,%xmm9 movdqa %xmm0,%xmm2 movdqa %xmm4,%xmm6 movdqa %xmm8,%xmm10 movdqa 0+96(%rbp),%xmm14 paddd .Lsse_inc(%rip),%xmm14 movdqa %xmm14,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm13,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,0+96(%rbp) movdqa %xmm13,0+112(%rbp) movdqa %xmm14,0+128(%rbp) .Lseal_sse_tail_192_rounds_and_x2hash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_sse_tail_192_rounds_and_x1hash: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq 
%rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 leaq 16(%rdi),%rdi decq %rcx jg .Lseal_sse_tail_192_rounds_and_x2hash decq %r8 jge .Lseal_sse_tail_192_rounds_and_x1hash paddd .Lchacha20_consts(%rip),%xmm2 paddd 0+48(%rbp),%xmm6 paddd 0+64(%rbp),%xmm10 paddd 0+128(%rbp),%xmm14 paddd .Lchacha20_consts(%rip),%xmm1 paddd 0+48(%rbp),%xmm5 paddd 0+64(%rbp),%xmm9 paddd 0+112(%rbp),%xmm13 paddd .Lchacha20_consts(%rip),%xmm0 paddd 0+48(%rbp),%xmm4 paddd 0+64(%rbp),%xmm8 paddd 0+96(%rbp),%xmm12 movdqu 0 + 0(%rsi),%xmm3 movdqu 16 + 0(%rsi),%xmm7 movdqu 32 + 0(%rsi),%xmm11 movdqu 48 + 0(%rsi),%xmm15 pxor %xmm3,%xmm2 pxor %xmm7,%xmm6 pxor %xmm11,%xmm10 pxor %xmm14,%xmm15 movdqu %xmm2,0 + 0(%rdi) movdqu %xmm6,16 + 0(%rdi) movdqu %xmm10,32 + 0(%rdi) movdqu %xmm15,48 + 0(%rdi) movdqu 0 + 64(%rsi),%xmm3 movdqu 16 + 64(%rsi),%xmm7 movdqu 32 + 64(%rsi),%xmm11 movdqu 48 + 64(%rsi),%xmm15 pxor %xmm3,%xmm1 pxor %xmm7,%xmm5 pxor %xmm11,%xmm9 pxor %xmm13,%xmm15 movdqu %xmm1,0 + 64(%rdi) movdqu %xmm5,16 + 64(%rdi) movdqu %xmm9,32 + 64(%rdi) movdqu %xmm15,48 + 64(%rdi) movq $128,%rcx subq $128,%rbx leaq 128(%rsi),%rsi .Lseal_sse_128_tail_hash: cmpq $16,%rcx jb .Lseal_sse_128_tail_xor addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx leaq 16(%rdi),%rdi jmp 
.Lseal_sse_128_tail_hash .Lseal_sse_128_tail_xor: cmpq $16,%rbx jb .Lseal_sse_tail_16 subq $16,%rbx movdqu 0(%rsi),%xmm3 pxor %xmm3,%xmm0 movdqu %xmm0,0(%rdi) addq 0(%rdi),%r10 adcq 8(%rdi),%r11 adcq $1,%r12 leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movdqa %xmm4,%xmm0 movdqa %xmm8,%xmm4 movdqa %xmm12,%xmm8 movdqa %xmm1,%xmm12 movdqa %xmm5,%xmm1 movdqa %xmm9,%xmm5 movdqa %xmm13,%xmm9 jmp .Lseal_sse_128_tail_xor .Lseal_sse_tail_16: testq %rbx,%rbx jz .Lprocess_blocks_of_extra_in movq %rbx,%r8 movq %rbx,%rcx leaq -1(%rsi,%rbx,1),%rsi pxor %xmm15,%xmm15 .Lseal_sse_tail_16_compose: pslldq $1,%xmm15 pinsrb $0,(%rsi),%xmm15 leaq -1(%rsi),%rsi decq %rcx jne .Lseal_sse_tail_16_compose pxor %xmm0,%xmm15 movq %rbx,%rcx movdqu %xmm15,%xmm0 .Lseal_sse_tail_16_extract: pextrb $0,%xmm0,(%rdi) psrldq $1,%xmm0 addq $1,%rdi subq $1,%rcx jnz .Lseal_sse_tail_16_extract movq 288 + 0 + 32(%rsp),%r9 movq 56(%r9),%r14 movq 48(%r9),%r13 testq %r14,%r14 jz .Lprocess_partial_block movq $16,%r15 subq %rbx,%r15 cmpq %r15,%r14 jge .Lload_extra_in movq %r14,%r15 .Lload_extra_in: leaq -1(%r13,%r15,1),%rsi addq %r15,%r13 subq %r15,%r14 movq %r13,48(%r9) movq %r14,56(%r9) addq %r15,%r8 pxor %xmm11,%xmm11 .Lload_extra_load_loop: pslldq $1,%xmm11 pinsrb $0,(%rsi),%xmm11 leaq -1(%rsi),%rsi subq $1,%r15 jnz .Lload_extra_load_loop movq %rbx,%r15 .Lload_extra_shift_loop: pslldq $1,%xmm11 subq $1,%r15 jnz .Lload_extra_shift_loop leaq .Land_masks(%rip),%r15 shlq $4,%rbx pand -16(%r15,%rbx,1),%xmm15 por %xmm11,%xmm15 .byte 102,77,15,126,253 pextrq $1,%xmm15,%r14 addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lprocess_blocks_of_extra_in: movq 288+32+0 (%rsp),%r9 movq 48(%r9),%rsi movq 56(%r9),%r8 movq %r8,%rcx shrq $4,%r8 .Lprocess_extra_hash_loop: jz process_extra_in_trailer addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rsi),%rsi subq $1,%r8 jmp .Lprocess_extra_hash_loop process_extra_in_trailer: andq 
$15,%rcx movq %rcx,%rbx jz .Ldo_length_block leaq -1(%rsi,%rcx,1),%rsi .Lprocess_extra_in_trailer_load: pslldq $1,%xmm15 pinsrb $0,(%rsi),%xmm15 leaq -1(%rsi),%rsi subq $1,%rcx jnz .Lprocess_extra_in_trailer_load .Lprocess_partial_block: leaq .Land_masks(%rip),%r15 shlq $4,%rbx pand -16(%r15,%rbx,1),%xmm15 .byte 102,77,15,126,253 pextrq $1,%xmm15,%r14 addq %r13,%r10 adcq %r14,%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Ldo_length_block: addq 0+0+32(%rbp),%r10 adcq 8+0+32(%rbp),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 movq %r10,%r13 movq %r11,%r14 movq %r12,%r15 subq $-5,%r10 sbbq $-1,%r11 sbbq $3,%r12 cmovcq %r13,%r10 cmovcq %r14,%r11 cmovcq %r15,%r12 addq 0+0+16(%rbp),%r10 adcq 8+0+16(%rbp),%r11 .cfi_remember_state addq $288 + 0 + 32,%rsp .cfi_adjust_cfa_offset -(288 + 32) popq %r9 .cfi_adjust_cfa_offset -8 .cfi_restore %r9 movq %r10,(%r9) movq %r11,8(%r9) popq %r15 .cfi_adjust_cfa_offset -8 .cfi_restore %r15 popq %r14 .cfi_adjust_cfa_offset -8 .cfi_restore %r14 popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 popq %rbx .cfi_adjust_cfa_offset -8 .cfi_restore %rbx popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp .byte 0xf3,0xc3 .Lseal_sse_128: .cfi_restore_state movdqu .Lchacha20_consts(%rip),%xmm0 movdqa %xmm0,%xmm1 movdqa %xmm0,%xmm2 movdqu 0(%r9),%xmm4 movdqa %xmm4,%xmm5 movdqa %xmm4,%xmm6 movdqu 16(%r9),%xmm8 movdqa %xmm8,%xmm9 movdqa %xmm8,%xmm10 movdqu 32(%r9),%xmm14 movdqa %xmm14,%xmm12 paddd .Lsse_inc(%rip),%xmm12 movdqa %xmm12,%xmm13 paddd .Lsse_inc(%rip),%xmm13 movdqa %xmm4,%xmm7 movdqa %xmm8,%xmm11 movdqa %xmm12,%xmm15 movq $10,%r10 .Lseal_sse_128_rounds: paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,4 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,12 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,4 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,12 paddd 
%xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,4 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,12 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol16(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $12,%xmm3 psrld $20,%xmm4 pxor %xmm3,%xmm4 paddd %xmm4,%xmm0 pxor %xmm0,%xmm12 pshufb .Lrol8(%rip),%xmm12 paddd %xmm12,%xmm8 pxor %xmm8,%xmm4 movdqa %xmm4,%xmm3 pslld $7,%xmm3 psrld $25,%xmm4 pxor %xmm3,%xmm4 .byte 102,15,58,15,228,12 .byte 102,69,15,58,15,192,8 .byte 102,69,15,58,15,228,4 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol16(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $12,%xmm3 psrld $20,%xmm5 pxor %xmm3,%xmm5 paddd %xmm5,%xmm1 pxor %xmm1,%xmm13 pshufb .Lrol8(%rip),%xmm13 paddd %xmm13,%xmm9 pxor %xmm9,%xmm5 movdqa %xmm5,%xmm3 pslld $7,%xmm3 psrld $25,%xmm5 pxor %xmm3,%xmm5 .byte 102,15,58,15,237,12 .byte 102,69,15,58,15,201,8 .byte 102,69,15,58,15,237,4 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol16(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $12,%xmm3 psrld $20,%xmm6 pxor %xmm3,%xmm6 paddd %xmm6,%xmm2 pxor %xmm2,%xmm14 pshufb .Lrol8(%rip),%xmm14 paddd %xmm14,%xmm10 pxor %xmm10,%xmm6 movdqa %xmm6,%xmm3 pslld $7,%xmm3 psrld $25,%xmm6 pxor %xmm3,%xmm6 .byte 102,15,58,15,246,12 .byte 102,69,15,58,15,210,8 .byte 102,69,15,58,15,246,4 decq %r10 jnz .Lseal_sse_128_rounds paddd .Lchacha20_consts(%rip),%xmm0 paddd .Lchacha20_consts(%rip),%xmm1 paddd .Lchacha20_consts(%rip),%xmm2 paddd %xmm7,%xmm4 paddd %xmm7,%xmm5 paddd %xmm7,%xmm6 paddd %xmm11,%xmm8 paddd %xmm11,%xmm9 paddd %xmm15,%xmm12 paddd .Lsse_inc(%rip),%xmm15 paddd %xmm15,%xmm13 pand .Lclamp(%rip),%xmm2 movdqa %xmm2,0+0(%rbp) movdqa %xmm6,0+16(%rbp) movq %r8,%r8 call poly_hash_ad_internal jmp .Lseal_sse_128_tail_xor .size chacha20_poly1305_seal, .-chacha20_poly1305_seal .cfi_endproc .type chacha20_poly1305_open_avx2,@function .align 64 chacha20_poly1305_open_avx2: .cfi_startproc .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .cfi_adjust_cfa_offset 8 .cfi_offset %r9,-64 .cfi_adjust_cfa_offset 288 + 32 vzeroupper vmovdqa .Lchacha20_consts(%rip),%ymm0 vbroadcasti128 0(%r9),%ymm4 vbroadcasti128 16(%r9),%ymm8 vbroadcasti128 32(%r9),%ymm12 vpaddd .Lavx2_init(%rip),%ymm12,%ymm12 cmpq $192,%rbx jbe .Lopen_avx2_192 cmpq $320,%rbx jbe .Lopen_avx2_320 vmovdqa %ymm4,0+64(%rbp) vmovdqa %ymm8,0+96(%rbp) vmovdqa %ymm12,0+160(%rbp) movq $10,%r10 .Lopen_avx2_init_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 
vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 decq %r10 jne .Lopen_avx2_init_rounds vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand .Lclamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 movq %r8,%r8 call poly_hash_ad_internal xorq %rcx,%rcx .Lopen_avx2_init_hash: addq 0+0(%rsi,%rcx,1),%r10 adcq 8+0(%rsi,%rcx,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq $16,%rcx cmpq $64,%rcx jne .Lopen_avx2_init_hash vpxor 0(%rsi),%ymm0,%ymm0 vpxor 32(%rsi),%ymm4,%ymm4 vmovdqu %ymm0,0(%rdi) vmovdqu %ymm4,32(%rdi) leaq 64(%rsi),%rsi leaq 64(%rdi),%rdi subq $64,%rbx .Lopen_avx2_main_loop: cmpq $512,%rbx jb .Lopen_avx2_main_loop_done vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) xorq %rcx,%rcx .Lopen_avx2_main_loop_rounds: addq 0+0(%rsi,%rcx,1),%r10 adcq 8+0(%rsi,%rcx,1),%r11 adcq $1,%r12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 addq %rax,%r15 adcq %rdx,%r9 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor 
%ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 addq 0+16(%rsi,%rcx,1),%r10 adcq 8+16(%rsi,%rcx,1),%r11 adcq $1,%r12 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 addq %rax,%r15 adcq %rdx,%r9 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq 0+32(%rsi,%rcx,1),%r10 adcq 8+32(%rsi,%rcx,1),%r11 adcq $1,%r12 leaq 48(%rcx),%rcx vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd 
%ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq %rax,%r15 adcq %rdx,%r9 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpalignr $4,%ymm12,%ymm12,%ymm12 cmpq $60*8,%rcx jne .Lopen_avx2_main_loop_rounds vpaddd .Lchacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) addq 0+60*8(%rsi),%r10 adcq 8+60*8(%rsi),%r11 adcq $1,%r12 vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) addq 0+60*8+16(%rsi),%r10 adcq 8+60*8+16(%rsi),%r11 adcq $1,%r12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 
vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vperm2i128 $0x13,%ymm0,%ymm4,%ymm4 vperm2i128 $0x02,%ymm8,%ymm12,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm8 vpxor 0+384(%rsi),%ymm3,%ymm3 vpxor 32+384(%rsi),%ymm0,%ymm0 vpxor 64+384(%rsi),%ymm4,%ymm4 vpxor 96+384(%rsi),%ymm8,%ymm8 vmovdqu %ymm3,0+384(%rdi) vmovdqu %ymm0,32+384(%rdi) vmovdqu %ymm4,64+384(%rdi) vmovdqu %ymm8,96+384(%rdi) leaq 512(%rsi),%rsi leaq 512(%rdi),%rdi subq $512,%rbx jmp .Lopen_avx2_main_loop .Lopen_avx2_main_loop_done: testq %rbx,%rbx vzeroupper je .Lopen_sse_finalize cmpq $384,%rbx ja .Lopen_avx2_tail_512 cmpq $256,%rbx ja .Lopen_avx2_tail_384 cmpq $128,%rbx ja .Lopen_avx2_tail_256 vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) xorq %r8,%r8 movq %rbx,%rcx andq $-16,%rcx testq %rcx,%rcx je .Lopen_avx2_tail_128_rounds .Lopen_avx2_tail_128_rounds_and_x1hash: addq 0+0(%rsi,%r8,1),%r10 adcq 8+0(%rsi,%r8,1),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lopen_avx2_tail_128_rounds: addq $16,%r8 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 cmpq %rcx,%r8 jb .Lopen_avx2_tail_128_rounds_and_x1hash cmpq $160,%r8 jne .Lopen_avx2_tail_128_rounds vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 
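/* <=128-byte open tail: the vpaddd sequence above folds the saved initial state back into the
   final ChaCha20 block; the vperm2i128 shuffles that follow reorder its 128-bit lanes so the
   keystream can be consumed 32 bytes at a time by .Lopen_avx2_tail_128_xor. */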
vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 jmp .Lopen_avx2_tail_128_xor .Lopen_avx2_tail_256: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) movq %rbx,0+128(%rbp) movq %rbx,%rcx subq $128,%rcx shrq $4,%rcx movq $10,%r8 cmpq $10,%rcx cmovgq %r8,%rcx movq %rsi,%rbx xorq %r8,%r8 .Lopen_avx2_tail_256_rounds_and_x1hash: addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx .Lopen_avx2_tail_256_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 incq %r8 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor 
%ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 cmpq %rcx,%r8 jb .Lopen_avx2_tail_256_rounds_and_x1hash cmpq $10,%r8 jne .Lopen_avx2_tail_256_rounds movq %rbx,%r8 subq %rsi,%rbx movq %rbx,%rcx movq 0+128(%rbp),%rbx .Lopen_avx2_tail_256_hash: addq $16,%rcx cmpq %rbx,%rcx jg .Lopen_avx2_tail_256_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 jmp .Lopen_avx2_tail_256_hash .Lopen_avx2_tail_256_done: vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm1,%ymm1 vpxor 64+0(%rsi),%ymm5,%ymm5 vpxor 96+0(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm1,32+0(%rdi) vmovdqu %ymm5,64+0(%rdi) vmovdqu %ymm9,96+0(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 128(%rsi),%rsi leaq 128(%rdi),%rdi subq $128,%rbx jmp .Lopen_avx2_tail_128_xor .Lopen_avx2_tail_384: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq %rbx,0+128(%rbp) movq %rbx,%rcx subq $256,%rcx shrq $4,%rcx addq $6,%rcx movq $10,%r8 cmpq $10,%rcx cmovgq %r8,%rcx movq %rsi,%rbx xorq %r8,%r8 .Lopen_avx2_tail_384_rounds_and_x2hash: addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx .Lopen_avx2_tail_384_rounds_and_x1hash: vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor 
%ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 addq 0+0(%rbx),%r10 adcq 8+0(%rbx),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rbx),%rbx incq %r8 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 cmpq %rcx,%r8 jb .Lopen_avx2_tail_384_rounds_and_x2hash cmpq $10,%r8 jne .Lopen_avx2_tail_384_rounds_and_x1hash movq %rbx,%r8 subq %rsi,%rbx movq %rbx,%rcx movq 0+128(%rbp),%rbx 
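/* Poly1305 catch-up for the 257..384-byte open tail: the loop at .Lopen_avx2_384_tail_hash
   absorbs any 16-byte ciphertext blocks that were not hashed during the interleaved rounds
   above, before the keystream is XORed in at .Lopen_avx2_384_tail_done. */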
.Lopen_avx2_384_tail_hash: addq $16,%rcx cmpq %rbx,%rcx jg .Lopen_avx2_384_tail_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 jmp .Lopen_avx2_384_tail_hash .Lopen_avx2_384_tail_done: vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm2,%ymm2 vpxor 64+0(%rsi),%ymm6,%ymm6 vpxor 96+0(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm2,32+0(%rdi) vmovdqu %ymm6,64+0(%rdi) vmovdqu %ymm10,96+0(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm1,%ymm1 vpxor 64+128(%rsi),%ymm5,%ymm5 vpxor 96+128(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm1,32+128(%rdi) vmovdqu %ymm5,64+128(%rdi) vmovdqu %ymm9,96+128(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 256(%rsi),%rsi leaq 256(%rdi),%rdi subq $256,%rbx jmp .Lopen_avx2_tail_128_xor .Lopen_avx2_tail_512: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) xorq %rcx,%rcx movq %rsi,%r8 .Lopen_avx2_tail_512_rounds_and_x2hash: addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 .Lopen_avx2_tail_512_rounds_and_x1hash: vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor 
%ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 addq 0+16(%r8),%r10 adcq 8+16(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%r8),%r8 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 
vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 incq %rcx cmpq $4,%rcx jl .Lopen_avx2_tail_512_rounds_and_x2hash cmpq $10,%rcx jne .Lopen_avx2_tail_512_rounds_and_x1hash movq %rbx,%rcx subq $384,%rcx andq $-16,%rcx .Lopen_avx2_tail_512_hash: testq %rcx,%rcx je .Lopen_avx2_tail_512_done addq 0+0(%r8),%r10 adcq 8+0(%r8),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%r8),%r8 subq $16,%rcx jmp .Lopen_avx2_tail_512_hash .Lopen_avx2_tail_512_done: vpaddd .Lchacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 
vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 leaq 384(%rsi),%rsi leaq 384(%rdi),%rdi subq $384,%rbx .Lopen_avx2_tail_128_xor: cmpq $32,%rbx jb .Lopen_avx2_tail_32_xor subq $32,%rbx vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 jmp .Lopen_avx2_tail_128_xor .Lopen_avx2_tail_32_xor: cmpq $16,%rbx vmovdqa %xmm0,%xmm1 jb .Lopen_avx2_exit subq $16,%rbx vpxor (%rsi),%xmm0,%xmm1 vmovdqu %xmm1,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi vperm2i128 $0x11,%ymm0,%ymm0,%ymm0 vmovdqa %xmm0,%xmm1 .Lopen_avx2_exit: vzeroupper jmp .Lopen_sse_tail_16 .Lopen_avx2_192: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd .Lavx2_inc(%rip),%ymm12,%ymm13 vmovdqa %ymm12,%ymm11 vmovdqa %ymm13,%ymm15 movq $10,%r10 .Lopen_avx2_192_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd 
%ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 decq %r10 jne .Lopen_avx2_192_rounds vpaddd %ymm2,%ymm0,%ymm0 vpaddd %ymm2,%ymm1,%ymm1 vpaddd %ymm6,%ymm4,%ymm4 vpaddd %ymm6,%ymm5,%ymm5 vpaddd %ymm10,%ymm8,%ymm8 vpaddd %ymm10,%ymm9,%ymm9 vpaddd %ymm11,%ymm12,%ymm12 vpaddd %ymm15,%ymm13,%ymm13 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand .Lclamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 .Lopen_avx2_short: movq %r8,%r8 call poly_hash_ad_internal .Lopen_avx2_short_hash_and_xor_loop: cmpq $32,%rbx jb .Lopen_avx2_short_tail_32 subq $32,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rsi),%r10 adcq 8+16(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 vmovdqa %ymm1,%ymm12 vmovdqa %ymm5,%ymm1 vmovdqa %ymm9,%ymm5 vmovdqa %ymm13,%ymm9 vmovdqa %ymm2,%ymm13 vmovdqa %ymm6,%ymm2 jmp .Lopen_avx2_short_hash_and_xor_loop .Lopen_avx2_short_tail_32: cmpq $16,%rbx vmovdqa %xmm0,%xmm1 jb .Lopen_avx2_short_tail_32_exit subq $16,%rbx addq 0+0(%rsi),%r10 adcq 8+0(%rsi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor (%rsi),%xmm0,%xmm3 vmovdqu %xmm3,(%rdi) leaq 16(%rsi),%rsi leaq 16(%rdi),%rdi vextracti128 $1,%ymm0,%xmm1 .Lopen_avx2_short_tail_32_exit: vzeroupper jmp .Lopen_sse_tail_16 .Lopen_avx2_320: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd .Lavx2_inc(%rip),%ymm12,%ymm13 vpaddd .Lavx2_inc(%rip),%ymm13,%ymm14 vmovdqa %ymm4,%ymm7 
vmovdqa %ymm8,%ymm11 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq $10,%r10 .Lopen_avx2_320_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 decq %r10 jne .Lopen_avx2_320_rounds vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd %ymm7,%ymm4,%ymm4 vpaddd %ymm7,%ymm5,%ymm5 vpaddd %ymm7,%ymm6,%ymm6 vpaddd %ymm11,%ymm8,%ymm8 vpaddd %ymm11,%ymm9,%ymm9 vpaddd %ymm11,%ymm10,%ymm10 vpaddd 0+160(%rbp),%ymm12,%ymm12 vpaddd 
0+192(%rbp),%ymm13,%ymm13 vpaddd 0+224(%rbp),%ymm14,%ymm14 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand .Lclamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 vperm2i128 $0x02,%ymm2,%ymm6,%ymm9 vperm2i128 $0x02,%ymm10,%ymm14,%ymm13 vperm2i128 $0x13,%ymm2,%ymm6,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm6 jmp .Lopen_avx2_short .size chacha20_poly1305_open_avx2, .-chacha20_poly1305_open_avx2 .cfi_endproc .type chacha20_poly1305_seal_avx2,@function .align 64 chacha20_poly1305_seal_avx2: .cfi_startproc .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-16 .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-24 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 .cfi_adjust_cfa_offset 8 .cfi_offset %r9,-64 .cfi_adjust_cfa_offset 288 + 32 vzeroupper vmovdqa .Lchacha20_consts(%rip),%ymm0 vbroadcasti128 0(%r9),%ymm4 vbroadcasti128 16(%r9),%ymm8 vbroadcasti128 32(%r9),%ymm12 vpaddd .Lavx2_init(%rip),%ymm12,%ymm12 cmpq $192,%rbx jbe .Lseal_avx2_192 cmpq $320,%rbx jbe .Lseal_avx2_320 vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm4,%ymm7 vmovdqa %ymm4,0+64(%rbp) vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vmovdqa %ymm8,%ymm11 vmovdqa %ymm8,0+96(%rbp) vmovdqa %ymm12,%ymm15 vpaddd .Lavx2_inc(%rip),%ymm15,%ymm14 vpaddd .Lavx2_inc(%rip),%ymm14,%ymm13 vpaddd .Lavx2_inc(%rip),%ymm13,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm15,0+256(%rbp) movq $10,%r10 .Lseal_avx2_init_rounds: vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld 
$32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 decq %r10 jnz .Lseal_avx2_init_rounds vpaddd .Lchacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vperm2i128 $0x02,%ymm3,%ymm7,%ymm15 vperm2i128 $0x13,%ymm3,%ymm7,%ymm3 vpand .Lclamp(%rip),%ymm15,%ymm15 vmovdqa %ymm15,0+0(%rbp) movq %r8,%r8 call poly_hash_ad_internal vpxor 0(%rsi),%ymm3,%ymm3 vpxor 32(%rsi),%ymm11,%ymm11 vmovdqu %ymm3,0(%rdi) vmovdqu %ymm11,32(%rdi) vperm2i128 
$0x02,%ymm2,%ymm6,%ymm15 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+64(%rsi),%ymm15,%ymm15 vpxor 32+64(%rsi),%ymm2,%ymm2 vpxor 64+64(%rsi),%ymm6,%ymm6 vpxor 96+64(%rsi),%ymm10,%ymm10 vmovdqu %ymm15,0+64(%rdi) vmovdqu %ymm2,32+64(%rdi) vmovdqu %ymm6,64+64(%rdi) vmovdqu %ymm10,96+64(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm15 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+192(%rsi),%ymm15,%ymm15 vpxor 32+192(%rsi),%ymm1,%ymm1 vpxor 64+192(%rsi),%ymm5,%ymm5 vpxor 96+192(%rsi),%ymm9,%ymm9 vmovdqu %ymm15,0+192(%rdi) vmovdqu %ymm1,32+192(%rdi) vmovdqu %ymm5,64+192(%rdi) vmovdqu %ymm9,96+192(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm15 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm15,%ymm8 leaq 320(%rsi),%rsi subq $320,%rbx movq $320,%rcx cmpq $128,%rbx jbe .Lseal_avx2_short_hash_remainder vpxor 0(%rsi),%ymm0,%ymm0 vpxor 32(%rsi),%ymm4,%ymm4 vpxor 64(%rsi),%ymm8,%ymm8 vpxor 96(%rsi),%ymm12,%ymm12 vmovdqu %ymm0,320(%rdi) vmovdqu %ymm4,352(%rdi) vmovdqu %ymm8,384(%rdi) vmovdqu %ymm12,416(%rdi) leaq 128(%rsi),%rsi subq $128,%rbx movq $8,%rcx movq $2,%r8 cmpq $128,%rbx jbe .Lseal_avx2_tail_128 cmpq $256,%rbx jbe .Lseal_avx2_tail_256 cmpq $384,%rbx jbe .Lseal_avx2_tail_384 cmpq $512,%rbx jbe .Lseal_avx2_tail_512 vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld 
$32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 
vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 subq $16,%rdi movq $9,%rcx jmp .Lseal_avx2_main_loop_rounds_entry .align 32 .Lseal_avx2_main_loop: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) movq $10,%rcx .align 32 .Lseal_avx2_main_loop_rounds: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 addq %rax,%r15 adcq %rdx,%r9 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 .Lseal_avx2_main_loop_rounds_entry: vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr 
$8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 addq %rax,%r15 adcq %rdx,%r9 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq 0+32(%rdi),%r10 adcq 8+32(%rdi),%r11 adcq $1,%r12 leaq 48(%rdi),%rdi vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 addq %rax,%r15 adcq %rdx,%r9 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpalignr $4,%ymm12,%ymm12,%ymm12 decq %rcx jne .Lseal_avx2_main_loop_rounds vpaddd .Lchacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 
0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vperm2i128 $0x13,%ymm0,%ymm4,%ymm4 vperm2i128 $0x02,%ymm8,%ymm12,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm8 vpxor 0+384(%rsi),%ymm3,%ymm3 vpxor 32+384(%rsi),%ymm0,%ymm0 vpxor 64+384(%rsi),%ymm4,%ymm4 vpxor 96+384(%rsi),%ymm8,%ymm8 vmovdqu %ymm3,0+384(%rdi) vmovdqu %ymm0,32+384(%rdi) vmovdqu %ymm4,64+384(%rdi) vmovdqu %ymm8,96+384(%rdi) leaq 512(%rsi),%rsi subq $512,%rbx cmpq $512,%rbx jg .Lseal_avx2_main_loop addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq 
%r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi movq $10,%rcx xorq %r8,%r8 cmpq $384,%rbx ja .Lseal_avx2_tail_512 cmpq $256,%rbx ja .Lseal_avx2_tail_384 cmpq $128,%rbx ja .Lseal_avx2_tail_256 .Lseal_avx2_tail_128: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) .Lseal_avx2_tail_128_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_avx2_tail_128_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 
movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg .Lseal_avx2_tail_128_rounds_and_3xhash decq %r8 jge .Lseal_avx2_tail_128_rounds_and_2xhash vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 jmp .Lseal_avx2_short_loop .Lseal_avx2_tail_256: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) .Lseal_avx2_tail_256_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_avx2_tail_256_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb 
.Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg .Lseal_avx2_tail_256_rounds_and_3xhash decq %r8 jge .Lseal_avx2_tail_256_rounds_and_2xhash vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm1,%ymm1 vpxor 64+0(%rsi),%ymm5,%ymm5 vpxor 96+0(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm1,32+0(%rdi) vmovdqu %ymm5,64+0(%rdi) vmovdqu %ymm9,96+0(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $128,%rcx leaq 128(%rsi),%rsi subq $128,%rbx jmp .Lseal_avx2_short_hash_remainder .Lseal_avx2_tail_384: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) .Lseal_avx2_tail_384_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_avx2_tail_384_rounds_and_2xhash: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb 
.Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld 
$25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 leaq 32(%rdi),%rdi decq %rcx jg .Lseal_avx2_tail_384_rounds_and_3xhash decq %r8 jge .Lseal_avx2_tail_384_rounds_and_2xhash vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+0(%rsi),%ymm3,%ymm3 vpxor 32+0(%rsi),%ymm2,%ymm2 vpxor 64+0(%rsi),%ymm6,%ymm6 vpxor 96+0(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+0(%rdi) vmovdqu %ymm2,32+0(%rdi) vmovdqu %ymm6,64+0(%rdi) vmovdqu %ymm10,96+0(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm1,%ymm1 vpxor 64+128(%rsi),%ymm5,%ymm5 vpxor 96+128(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm1,32+128(%rdi) vmovdqu %ymm5,64+128(%rdi) vmovdqu %ymm9,96+128(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 $0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $256,%rcx leaq 256(%rsi),%rsi subq $256,%rbx jmp .Lseal_avx2_short_hash_remainder .Lseal_avx2_tail_512: vmovdqa .Lchacha20_consts(%rip),%ymm0 vmovdqa 0+64(%rbp),%ymm4 vmovdqa 0+96(%rbp),%ymm8 vmovdqa %ymm0,%ymm1 vmovdqa %ymm4,%ymm5 vmovdqa %ymm8,%ymm9 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm10 vmovdqa %ymm0,%ymm3 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa .Lavx2_inc(%rip),%ymm12 vpaddd 0+160(%rbp),%ymm12,%ymm15 vpaddd %ymm15,%ymm12,%ymm14 vpaddd %ymm14,%ymm12,%ymm13 vpaddd %ymm13,%ymm12,%ymm12 vmovdqa %ymm15,0+256(%rbp) vmovdqa %ymm14,0+224(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm12,0+160(%rbp) .Lseal_avx2_tail_512_rounds_and_3xhash: addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi .Lseal_avx2_tail_512_rounds_and_2xhash: vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 
vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $4,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $12,%ymm15,%ymm15,%ymm15 vpalignr $4,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $4,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $4,%ymm4,%ymm4,%ymm4 addq %rax,%r15 adcq %rdx,%r9 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm12,%ymm12,%ymm12 vmovdqa %ymm8,0+128(%rbp) vmovdqa .Lrol16(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 vpaddd %ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $20,%ymm7,%ymm8 vpslld $32-20,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $20,%ymm6,%ymm8 vpslld $32-20,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $20,%ymm5,%ymm8 vpslld $32-20,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $20,%ymm4,%ymm8 vpslld $32-20,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa .Lrol8(%rip),%ymm8 vpaddd %ymm7,%ymm3,%ymm3 vpaddd %ymm6,%ymm2,%ymm2 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 vpaddd 
%ymm5,%ymm1,%ymm1 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm3,%ymm15,%ymm15 vpxor %ymm2,%ymm14,%ymm14 vpxor %ymm1,%ymm13,%ymm13 vpxor %ymm0,%ymm12,%ymm12 vpshufb %ymm8,%ymm15,%ymm15 vpshufb %ymm8,%ymm14,%ymm14 vpshufb %ymm8,%ymm13,%ymm13 vpshufb %ymm8,%ymm12,%ymm12 vpaddd %ymm15,%ymm11,%ymm11 vpaddd %ymm14,%ymm10,%ymm10 vpaddd %ymm13,%ymm9,%ymm9 vpaddd 0+128(%rbp),%ymm12,%ymm8 vpxor %ymm11,%ymm7,%ymm7 vpxor %ymm10,%ymm6,%ymm6 vpxor %ymm9,%ymm5,%ymm5 vpxor %ymm8,%ymm4,%ymm4 vmovdqa %ymm8,0+128(%rbp) vpsrld $25,%ymm7,%ymm8 movq 0+0+0(%rbp),%rdx movq %rdx,%r15 mulxq %r10,%r13,%r14 mulxq %r11,%rax,%rdx imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 vpslld $32-25,%ymm7,%ymm7 vpxor %ymm8,%ymm7,%ymm7 vpsrld $25,%ymm6,%ymm8 vpslld $32-25,%ymm6,%ymm6 vpxor %ymm8,%ymm6,%ymm6 vpsrld $25,%ymm5,%ymm8 vpslld $32-25,%ymm5,%ymm5 vpxor %ymm8,%ymm5,%ymm5 vpsrld $25,%ymm4,%ymm8 vpslld $32-25,%ymm4,%ymm4 vpxor %ymm8,%ymm4,%ymm4 vmovdqa 0+128(%rbp),%ymm8 vpalignr $12,%ymm7,%ymm7,%ymm7 vpalignr $8,%ymm11,%ymm11,%ymm11 vpalignr $4,%ymm15,%ymm15,%ymm15 vpalignr $12,%ymm6,%ymm6,%ymm6 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm14,%ymm14,%ymm14 vpalignr $12,%ymm5,%ymm5,%ymm5 vpalignr $8,%ymm9,%ymm9,%ymm9 movq 8+0+0(%rbp),%rdx mulxq %r10,%r10,%rax addq %r10,%r14 mulxq %r11,%r11,%r9 adcq %r11,%r15 adcq $0,%r9 imulq %r12,%rdx vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $12,%ymm4,%ymm4,%ymm4 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm12,%ymm12,%ymm12 addq %rax,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi decq %rcx jg .Lseal_avx2_tail_512_rounds_and_3xhash decq %r8 jge .Lseal_avx2_tail_512_rounds_and_2xhash vpaddd .Lchacha20_consts(%rip),%ymm3,%ymm3 vpaddd 0+64(%rbp),%ymm7,%ymm7 vpaddd 0+96(%rbp),%ymm11,%ymm11 vpaddd 0+256(%rbp),%ymm15,%ymm15 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd 0+64(%rbp),%ymm6,%ymm6 vpaddd 0+96(%rbp),%ymm10,%ymm10 vpaddd 0+224(%rbp),%ymm14,%ymm14 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd 0+64(%rbp),%ymm5,%ymm5 vpaddd 0+96(%rbp),%ymm9,%ymm9 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd 0+64(%rbp),%ymm4,%ymm4 vpaddd 0+96(%rbp),%ymm8,%ymm8 vpaddd 0+160(%rbp),%ymm12,%ymm12 vmovdqa %ymm0,0+128(%rbp) vperm2i128 $0x02,%ymm3,%ymm7,%ymm0 vperm2i128 $0x13,%ymm3,%ymm7,%ymm7 vperm2i128 $0x02,%ymm11,%ymm15,%ymm3 vperm2i128 $0x13,%ymm11,%ymm15,%ymm11 vpxor 0+0(%rsi),%ymm0,%ymm0 vpxor 32+0(%rsi),%ymm3,%ymm3 vpxor 64+0(%rsi),%ymm7,%ymm7 vpxor 96+0(%rsi),%ymm11,%ymm11 vmovdqu %ymm0,0+0(%rdi) vmovdqu %ymm3,32+0(%rdi) vmovdqu %ymm7,64+0(%rdi) vmovdqu %ymm11,96+0(%rdi) vmovdqa 0+128(%rbp),%ymm0 vperm2i128 $0x02,%ymm2,%ymm6,%ymm3 vperm2i128 $0x13,%ymm2,%ymm6,%ymm6 vperm2i128 $0x02,%ymm10,%ymm14,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm10 vpxor 0+128(%rsi),%ymm3,%ymm3 vpxor 32+128(%rsi),%ymm2,%ymm2 vpxor 64+128(%rsi),%ymm6,%ymm6 vpxor 96+128(%rsi),%ymm10,%ymm10 vmovdqu %ymm3,0+128(%rdi) vmovdqu %ymm2,32+128(%rdi) vmovdqu %ymm6,64+128(%rdi) vmovdqu %ymm10,96+128(%rdi) vperm2i128 $0x02,%ymm1,%ymm5,%ymm3 vperm2i128 $0x13,%ymm1,%ymm5,%ymm5 vperm2i128 $0x02,%ymm9,%ymm13,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm9 vpxor 0+256(%rsi),%ymm3,%ymm3 vpxor 32+256(%rsi),%ymm1,%ymm1 vpxor 64+256(%rsi),%ymm5,%ymm5 vpxor 96+256(%rsi),%ymm9,%ymm9 vmovdqu %ymm3,0+256(%rdi) vmovdqu %ymm1,32+256(%rdi) vmovdqu %ymm5,64+256(%rdi) vmovdqu %ymm9,96+256(%rdi) vperm2i128 $0x13,%ymm0,%ymm4,%ymm3 vperm2i128 
$0x02,%ymm0,%ymm4,%ymm0 vperm2i128 $0x02,%ymm8,%ymm12,%ymm4 vperm2i128 $0x13,%ymm8,%ymm12,%ymm12 vmovdqa %ymm3,%ymm8 movq $384,%rcx leaq 384(%rsi),%rsi subq $384,%rbx jmp .Lseal_avx2_short_hash_remainder .Lseal_avx2_320: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd .Lavx2_inc(%rip),%ymm12,%ymm13 vpaddd .Lavx2_inc(%rip),%ymm13,%ymm14 vmovdqa %ymm4,%ymm7 vmovdqa %ymm8,%ymm11 vmovdqa %ymm12,0+160(%rbp) vmovdqa %ymm13,0+192(%rbp) vmovdqa %ymm14,0+224(%rbp) movq $10,%r10 .Lseal_avx2_320_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr $12,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $4,%ymm6,%ymm6,%ymm6 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol16(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpsrld $20,%ymm6,%ymm3 vpslld $12,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpaddd %ymm6,%ymm2,%ymm2 vpxor %ymm2,%ymm14,%ymm14 vpshufb .Lrol8(%rip),%ymm14,%ymm14 vpaddd %ymm14,%ymm10,%ymm10 vpxor %ymm10,%ymm6,%ymm6 vpslld $7,%ymm6,%ymm3 vpsrld $25,%ymm6,%ymm6 vpxor %ymm3,%ymm6,%ymm6 vpalignr 
$4,%ymm14,%ymm14,%ymm14 vpalignr $8,%ymm10,%ymm10,%ymm10 vpalignr $12,%ymm6,%ymm6,%ymm6 decq %r10 jne .Lseal_avx2_320_rounds vpaddd .Lchacha20_consts(%rip),%ymm0,%ymm0 vpaddd .Lchacha20_consts(%rip),%ymm1,%ymm1 vpaddd .Lchacha20_consts(%rip),%ymm2,%ymm2 vpaddd %ymm7,%ymm4,%ymm4 vpaddd %ymm7,%ymm5,%ymm5 vpaddd %ymm7,%ymm6,%ymm6 vpaddd %ymm11,%ymm8,%ymm8 vpaddd %ymm11,%ymm9,%ymm9 vpaddd %ymm11,%ymm10,%ymm10 vpaddd 0+160(%rbp),%ymm12,%ymm12 vpaddd 0+192(%rbp),%ymm13,%ymm13 vpaddd 0+224(%rbp),%ymm14,%ymm14 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand .Lclamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 vperm2i128 $0x02,%ymm2,%ymm6,%ymm9 vperm2i128 $0x02,%ymm10,%ymm14,%ymm13 vperm2i128 $0x13,%ymm2,%ymm6,%ymm2 vperm2i128 $0x13,%ymm10,%ymm14,%ymm6 jmp .Lseal_avx2_short .Lseal_avx2_192: vmovdqa %ymm0,%ymm1 vmovdqa %ymm0,%ymm2 vmovdqa %ymm4,%ymm5 vmovdqa %ymm4,%ymm6 vmovdqa %ymm8,%ymm9 vmovdqa %ymm8,%ymm10 vpaddd .Lavx2_inc(%rip),%ymm12,%ymm13 vmovdqa %ymm12,%ymm11 vmovdqa %ymm13,%ymm15 movq $10,%r10 .Lseal_avx2_192_rounds: vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $12,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $4,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $12,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $4,%ymm5,%ymm5,%ymm5 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol16(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpsrld $20,%ymm4,%ymm3 vpslld $12,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpaddd %ymm4,%ymm0,%ymm0 vpxor %ymm0,%ymm12,%ymm12 vpshufb .Lrol8(%rip),%ymm12,%ymm12 vpaddd %ymm12,%ymm8,%ymm8 vpxor %ymm8,%ymm4,%ymm4 vpslld $7,%ymm4,%ymm3 vpsrld $25,%ymm4,%ymm4 vpxor %ymm3,%ymm4,%ymm4 vpalignr $4,%ymm12,%ymm12,%ymm12 vpalignr $8,%ymm8,%ymm8,%ymm8 vpalignr $12,%ymm4,%ymm4,%ymm4 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol16(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpsrld $20,%ymm5,%ymm3 vpslld $12,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpaddd %ymm5,%ymm1,%ymm1 vpxor %ymm1,%ymm13,%ymm13 vpshufb .Lrol8(%rip),%ymm13,%ymm13 vpaddd %ymm13,%ymm9,%ymm9 vpxor %ymm9,%ymm5,%ymm5 vpslld $7,%ymm5,%ymm3 vpsrld $25,%ymm5,%ymm5 vpxor %ymm3,%ymm5,%ymm5 vpalignr $4,%ymm13,%ymm13,%ymm13 vpalignr $8,%ymm9,%ymm9,%ymm9 vpalignr $12,%ymm5,%ymm5,%ymm5 decq %r10 jne .Lseal_avx2_192_rounds vpaddd %ymm2,%ymm0,%ymm0 vpaddd %ymm2,%ymm1,%ymm1 vpaddd %ymm6,%ymm4,%ymm4 vpaddd %ymm6,%ymm5,%ymm5 vpaddd %ymm10,%ymm8,%ymm8 vpaddd %ymm10,%ymm9,%ymm9 vpaddd %ymm11,%ymm12,%ymm12 vpaddd %ymm15,%ymm13,%ymm13 vperm2i128 $0x02,%ymm0,%ymm4,%ymm3 vpand 
.Lclamp(%rip),%ymm3,%ymm3 vmovdqa %ymm3,0+0(%rbp) vperm2i128 $0x13,%ymm0,%ymm4,%ymm0 vperm2i128 $0x13,%ymm8,%ymm12,%ymm4 vperm2i128 $0x02,%ymm1,%ymm5,%ymm8 vperm2i128 $0x02,%ymm9,%ymm13,%ymm12 vperm2i128 $0x13,%ymm1,%ymm5,%ymm1 vperm2i128 $0x13,%ymm9,%ymm13,%ymm5 .Lseal_avx2_short: movq %r8,%r8 call poly_hash_ad_internal xorq %rcx,%rcx .Lseal_avx2_short_hash_remainder: cmpq $16,%rcx jb .Lseal_avx2_short_loop addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 subq $16,%rcx addq $16,%rdi jmp .Lseal_avx2_short_hash_remainder .Lseal_avx2_short_loop: cmpq $32,%rbx jb .Lseal_avx2_short_tail subq $32,%rbx vpxor (%rsi),%ymm0,%ymm0 vmovdqu %ymm0,(%rdi) leaq 32(%rsi),%rsi addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 addq 0+16(%rdi),%r10 adcq 8+16(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 32(%rdi),%rdi vmovdqa %ymm4,%ymm0 vmovdqa %ymm8,%ymm4 vmovdqa %ymm12,%ymm8 vmovdqa %ymm1,%ymm12 vmovdqa %ymm5,%ymm1 vmovdqa %ymm9,%ymm5 vmovdqa %ymm13,%ymm9 vmovdqa %ymm2,%ymm13 vmovdqa %ymm6,%ymm2 jmp .Lseal_avx2_short_loop .Lseal_avx2_short_tail: cmpq $16,%rbx jb .Lseal_avx2_exit subq $16,%rbx vpxor (%rsi),%xmm0,%xmm3 vmovdqu %xmm3,(%rdi) leaq 16(%rsi),%rsi addq 0+0(%rdi),%r10 adcq 8+0(%rdi),%r11 adcq $1,%r12 movq 0+0+0(%rbp),%rax movq %rax,%r15 mulq %r10 movq %rax,%r13 movq %rdx,%r14 movq 0+0+0(%rbp),%rax mulq %r11 imulq %r12,%r15 addq %rax,%r14 adcq %rdx,%r15 movq 8+0+0(%rbp),%rax movq %rax,%r9 mulq %r10 addq %rax,%r14 adcq $0,%rdx movq %rdx,%r10 movq 8+0+0(%rbp),%rax mulq %r11 addq %rax,%r15 adcq $0,%rdx imulq %r12,%r9 addq %r10,%r15 adcq %rdx,%r9 movq %r13,%r10 movq %r14,%r11 movq %r15,%r12 andq $3,%r12 movq %r15,%r13 andq $-4,%r13 movq %r9,%r14 shrdq $2,%r9,%r15 shrq $2,%r9 addq %r13,%r15 adcq %r14,%r9 addq %r15,%r10 adcq %r9,%r11 adcq $0,%r12 leaq 16(%rdi),%rdi vextracti128 $1,%ymm0,%xmm0 .Lseal_avx2_exit: vzeroupper jmp .Lseal_sse_tail_16 .cfi_endproc .size chacha20_poly1305_seal_avx2, 
.-chacha20_poly1305_seal_avx2 #endif
marvin-hansen/iggy-streaming-system
91,466
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/cipher_extra/aesni-sha256-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P .globl aesni_cbc_sha256_enc .hidden aesni_cbc_sha256_enc .type aesni_cbc_sha256_enc,@function .align 16 aesni_cbc_sha256_enc: .cfi_startproc leaq OPENSSL_ia32cap_P(%rip),%r11 movl $1,%eax cmpq $0,%rdi je .Lprobe movl 0(%r11),%eax movq 4(%r11),%r10 btq $61,%r10 jc aesni_cbc_sha256_enc_shaext movq %r10,%r11 shrq $32,%r11 testl $2048,%r10d jnz aesni_cbc_sha256_enc_xop andl $296,%r11d cmpl $296,%r11d je aesni_cbc_sha256_enc_avx2 andl $268435456,%r10d jnz aesni_cbc_sha256_enc_avx ud2 xorl %eax,%eax cmpq $0,%rdi je .Lprobe ud2 .Lprobe: .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc .section .rodata .align 64 .type K256,@object K256: .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1 .long 0,0,0,0, 0,0,0,0 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .align 64 .type aesni_cbc_sha256_enc_xop,@function .align 64 aesni_cbc_sha256_enc_xop: .cfi_startproc .Lxop_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 subq $128,%rsp andq $-64,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) .cfi_escape 
0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08 .Lprologue_xop: vzeroall movq %rdi,%r12 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r13 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 subq $9,%r14 movl 0(%r15),%eax movl 4(%r15),%ebx movl 8(%r15),%ecx movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqa 0(%r13,%r14,8),%xmm14 vmovdqa 16(%r13,%r14,8),%xmm13 vmovdqa 32(%r13,%r14,8),%xmm12 vmovdqu 0-128(%rdi),%xmm10 jmp .Lloop_xop .align 16 .Lloop_xop: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi,%r12,1),%xmm0 vmovdqu 16(%rsi,%r12,1),%xmm1 vmovdqu 32(%rsi,%r12,1),%xmm2 vmovdqu 48(%rsi,%r12,1),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%esi vmovdqa %xmm6,32(%rsp) xorl %ecx,%esi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp .Lxop_00_47 .align 16 .Lxop_00_47: subq $-32*4,%rbp vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) vpalignr $4,%xmm0,%xmm1,%xmm4 rorl $14,%r13d movl %r14d,%eax vpalignr $4,%xmm2,%xmm3,%xmm7 movl %r9d,%r12d xorl %r8d,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %r10d,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %eax,%r14d vpaddd %xmm7,%xmm0,%xmm0 andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %r10d,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi .byte 143,232,120,194,251,13 xorl %eax,%r14d addl %r13d,%r11d vpxor %xmm6,%xmm4,%xmm4 xorl %ebx,%esi addl %r11d,%edx vpsrld $10,%xmm3,%xmm6 rorl $2,%r14d addl %esi,%r11d vpaddd %xmm4,%xmm0,%xmm0 movl %edx,%r13d addl %r11d,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%r11d vpxor %xmm6,%xmm7,%xmm7 movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d vpsrldq $8,%xmm7,%xmm7 addl 4(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d vpaddd %xmm7,%xmm0,%xmm0 xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d .byte 143,232,120,194,248,13 xorl %r11d,%r14d addl %r13d,%r10d vpsrld $10,%xmm0,%xmm6 xorl %eax,%r15d addl %r10d,%ecx .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%r10d vpxor %xmm6,%xmm7,%xmm7 movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d vpxor %xmm5,%xmm7,%xmm7 movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d vpaddd %xmm7,%xmm0,%xmm0 addl 8(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d vpaddd 0(%rbp),%xmm0,%xmm6 xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl 
$2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) vpalignr $4,%xmm1,%xmm2,%xmm4 rorl $14,%r13d movl %r14d,%r8d vpalignr $4,%xmm3,%xmm0,%xmm7 movl %ebx,%r12d xorl %eax,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %ecx,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %r8d,%r14d vpaddd %xmm7,%xmm1,%xmm1 andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %ecx,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi .byte 143,232,120,194,248,13 xorl %r8d,%r14d addl %r13d,%edx vpxor %xmm6,%xmm4,%xmm4 xorl %r9d,%esi addl %edx,%r11d vpsrld $10,%xmm0,%xmm6 rorl $2,%r14d addl %esi,%edx vpaddd %xmm4,%xmm1,%xmm1 movl %r11d,%r13d addl %edx,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%edx vpxor %xmm6,%xmm7,%xmm7 movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrldq $8,%xmm7,%xmm7 addl 20(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d vpaddd %xmm7,%xmm1,%xmm1 xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d .byte 143,232,120,194,249,13 xorl %edx,%r14d addl %r13d,%ecx vpsrld $10,%xmm1,%xmm6 xorl %r8d,%r15d addl %ecx,%r10d .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%ecx vpxor %xmm6,%xmm7,%xmm7 movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx vpxor %xmm5,%xmm7,%xmm7 movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d vpaddd %xmm7,%xmm1,%xmm1 addl 24(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d vpaddd 32(%rbp),%xmm1,%xmm6 xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 rorl $14,%r13d movl %r14d,%eax vpalignr $4,%xmm0,%xmm1,%xmm7 movl %r9d,%r12d xorl %r8d,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %r10d,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %eax,%r14d vpaddd %xmm7,%xmm2,%xmm2 andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %r10d,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi .byte 143,232,120,194,249,13 xorl %eax,%r14d addl %r13d,%r11d vpxor %xmm6,%xmm4,%xmm4 xorl %ebx,%esi addl %r11d,%edx vpsrld $10,%xmm1,%xmm6 rorl $2,%r14d addl %esi,%r11d vpaddd %xmm4,%xmm2,%xmm2 movl %edx,%r13d addl %r11d,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%r11d vpxor %xmm6,%xmm7,%xmm7 movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %r11d,%r14d andl 
%edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d vpsrldq $8,%xmm7,%xmm7 addl 36(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d vpaddd %xmm7,%xmm2,%xmm2 xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d .byte 143,232,120,194,250,13 xorl %r11d,%r14d addl %r13d,%r10d vpsrld $10,%xmm2,%xmm6 xorl %eax,%r15d addl %r10d,%ecx .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%r10d vpxor %xmm6,%xmm7,%xmm7 movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d vpxor %xmm5,%xmm7,%xmm7 movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d vpaddd %xmm7,%xmm2,%xmm2 addl 40(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d vpaddd 64(%rbp),%xmm2,%xmm6 xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 rorl $14,%r13d movl %r14d,%r8d vpalignr $4,%xmm1,%xmm2,%xmm7 movl %ebx,%r12d xorl %eax,%r13d .byte 143,232,120,194,236,14 rorl $9,%r14d xorl %ecx,%r12d vpsrld $3,%xmm4,%xmm4 rorl $5,%r13d xorl %r8d,%r14d vpaddd %xmm7,%xmm3,%xmm3 andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d .byte 143,232,120,194,245,11 rorl $11,%r14d xorl %ecx,%r12d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi .byte 143,232,120,194,250,13 xorl %r8d,%r14d addl %r13d,%edx vpxor %xmm6,%xmm4,%xmm4 xorl %r9d,%esi addl %edx,%r11d vpsrld $10,%xmm2,%xmm6 rorl $2,%r14d addl %esi,%edx vpaddd %xmm4,%xmm3,%xmm3 movl %r11d,%r13d addl %edx,%r14d .byte 143,232,120,194,239,2 rorl $14,%r13d movl %r14d,%edx vpxor %xmm6,%xmm7,%xmm7 movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d vpxor %xmm5,%xmm7,%xmm7 rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrldq $8,%xmm7,%xmm7 addl 52(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d vpaddd %xmm7,%xmm3,%xmm3 xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d .byte 143,232,120,194,251,13 xorl %edx,%r14d addl %r13d,%ecx vpsrld $10,%xmm3,%xmm6 xorl %r8d,%r15d addl %ecx,%r10d .byte 143,232,120,194,239,2 rorl $2,%r14d addl %r15d,%ecx vpxor %xmm6,%xmm7,%xmm7 movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx vpxor %xmm5,%xmm7,%xmm7 movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d vpslldq $8,%xmm7,%xmm7 rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d vpaddd %xmm7,%xmm3,%xmm3 addl 56(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d vpaddd 96(%rbp),%xmm3,%xmm6 xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl 
%r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) movq 64+0(%rsp),%r12 vpand %xmm14,%xmm11,%xmm11 movq 64+8(%rsp),%r15 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r12,1) leaq 16(%r12),%r12 cmpb $0,131(%rbp) jne .Lxop_00_47 vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d rorl $11,%r14d xorl %r10d,%r12d xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx rorl $2,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx rorl $2,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r12d xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d rorl $2,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl 
%ecx,%r10d rorl $2,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d rorl $14,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d rorl $9,%r14d xorl %r10d,%r12d rorl $5,%r13d xorl %eax,%r14d andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d rorl $11,%r14d xorl %r10d,%r12d xorl %ebx,%r15d rorl $6,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx rorl $2,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d rorl $14,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d rorl $9,%r14d xorl %r9d,%r12d rorl $5,%r13d xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%esi rorl $11,%r14d xorl %r9d,%r12d xorl %eax,%esi rorl $6,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx rorl $2,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d rorl $14,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d rorl $9,%r14d xorl %r8d,%r12d rorl $5,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d rorl $11,%r14d xorl %r8d,%r12d xorl %r11d,%r15d rorl $6,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx rorl $2,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d rorl $14,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d rorl $9,%r14d xorl %edx,%r12d rorl $5,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi rorl $11,%r14d xorl %edx,%r12d xorl %r10d,%esi rorl $6,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax rorl $2,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d rorl $14,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d rorl $9,%r14d xorl %ecx,%r12d rorl $5,%r13d xorl %r8d,%r14d andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d rorl $11,%r14d xorl %ecx,%r12d xorl %r9d,%r15d rorl $6,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d rorl $2,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d rorl $14,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d rorl $9,%r14d xorl %ebx,%r12d rorl $5,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast 
%xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%esi rorl $11,%r14d xorl %ebx,%r12d xorl %r8d,%esi rorl $6,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d rorl $2,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d rorl $14,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d rorl $9,%r14d xorl %eax,%r12d rorl $5,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d rorl $11,%r14d xorl %eax,%r12d xorl %edx,%r15d rorl $6,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d rorl $2,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d rorl $14,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d rorl $9,%r14d xorl %r11d,%r12d rorl $5,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi rorl $11,%r14d xorl %r11d,%r12d xorl %ecx,%esi rorl $6,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d rorl $2,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%r12 movq 64+8(%rsp),%r13 movq 64+40(%rsp),%r15 movq 64+48(%rsp),%rsi vpand %xmm14,%xmm11,%xmm11 movl %r14d,%eax vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r12),%r12 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d cmpq 64+16(%rsp),%r12 movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) jb .Lloop_xop movq 64+32(%rsp),%r8 movq 120(%rsp),%rsi .cfi_def_cfa %rsi,8 vmovdqu %xmm8,(%r8) vzeroall movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq (%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue_xop: .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop .type aesni_cbc_sha256_enc_avx,@function .align 64 aesni_cbc_sha256_enc_avx: .cfi_startproc .Lavx_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 subq $128,%rsp andq $-64,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rsi,64+8(%rsp) movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08 .Lprologue_avx: vzeroall movq %rdi,%r12 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r13 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 subq $9,%r14 movl 0(%r15),%eax movl 4(%r15),%ebx movl 8(%r15),%ecx movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqa 0(%r13,%r14,8),%xmm14 vmovdqa 16(%r13,%r14,8),%xmm13 vmovdqa 32(%r13,%r14,8),%xmm12 vmovdqu 0-128(%rdi),%xmm10 jmp .Lloop_avx .align 16 .Lloop_avx: vmovdqa K256+512(%rip),%xmm7 vmovdqu 0(%rsi,%r12,1),%xmm0 vmovdqu 16(%rsi,%r12,1),%xmm1 vmovdqu 
32(%rsi,%r12,1),%xmm2 vmovdqu 48(%rsi,%r12,1),%xmm3 vpshufb %xmm7,%xmm0,%xmm0 leaq K256(%rip),%rbp vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd 0(%rbp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 32(%rbp),%xmm1,%xmm5 vpaddd 64(%rbp),%xmm2,%xmm6 vpaddd 96(%rbp),%xmm3,%xmm7 vmovdqa %xmm4,0(%rsp) movl %eax,%r14d vmovdqa %xmm5,16(%rsp) movl %ebx,%esi vmovdqa %xmm6,32(%rsp) xorl %ecx,%esi vmovdqa %xmm7,48(%rsp) movl %r8d,%r13d jmp .Lavx_00_47 .align 16 .Lavx_00_47: subq $-32*4,%rbp vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) vpalignr $4,%xmm0,%xmm1,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm2,%xmm3,%xmm7 xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm0,%xmm0 vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi vpshufd $250,%xmm3,%xmm7 addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 4(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm0,%xmm0 addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpsrldq $8,%xmm6,%xmm6 andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d vpaddd %xmm6,%xmm0,%xmm0 movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d vpshufd $80,%xmm0,%xmm7 xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d vpsrlq $17,%xmm7,%xmm7 xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpslldq $8,%xmm6,%xmm6 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi vpaddd %xmm6,%xmm0,%xmm0 shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi vpaddd 0(%rbp),%xmm0,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,0(%rsp) vpalignr $4,%xmm1,%xmm2,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm3,%xmm0,%xmm7 xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl 
%r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm1,%xmm1 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi vpshufd $250,%xmm0,%xmm7 addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 20(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm1,%xmm1 addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpsrldq $8,%xmm6,%xmm6 andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx vpaddd %xmm6,%xmm1,%xmm1 movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d vpshufd $80,%xmm1,%xmm7 xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx vpsrlq $17,%xmm7,%xmm7 xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpslldq $8,%xmm6,%xmm6 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi vpaddd %xmm6,%xmm1,%xmm1 shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi vpaddd 32(%rbp),%xmm1,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,16(%rsp) vpalignr $4,%xmm2,%xmm3,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d vpalignr $4,%xmm0,%xmm1,%xmm7 xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpaddd %xmm7,%xmm2,%xmm2 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi vpshufd $250,%xmm1,%xmm7 addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d vpsrld $11,%xmm6,%xmm6 movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl 
%r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d vpsrld $10,%xmm7,%xmm6 addl 36(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d vpaddd %xmm4,%xmm2,%xmm2 addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d vpsrlq $2,%xmm7,%xmm7 addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d vpxor %xmm7,%xmm6,%xmm6 movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d vpsrldq $8,%xmm6,%xmm6 andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d vpaddd %xmm6,%xmm2,%xmm2 movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d vpshufd $80,%xmm2,%xmm7 xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d vpsrlq $17,%xmm7,%xmm7 xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vpslldq $8,%xmm6,%xmm6 vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi vpaddd %xmm6,%xmm2,%xmm2 shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi vpaddd 64(%rbp),%xmm2,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d vmovdqa %xmm6,32(%rsp) vpalignr $4,%xmm3,%xmm0,%xmm4 shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d vpalignr $4,%xmm1,%xmm2,%xmm7 xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d vpsrld $7,%xmm4,%xmm6 shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpaddd %xmm7,%xmm3,%xmm3 vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d vpsrld $3,%xmm4,%xmm7 shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d vpslld $14,%xmm4,%xmm5 shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi vpxor %xmm6,%xmm7,%xmm4 xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi vpshufd $250,%xmm2,%xmm7 addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx vpsrld $11,%xmm6,%xmm6 movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d vpxor %xmm5,%xmm4,%xmm4 movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d vpslld $11,%xmm5,%xmm5 shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d vpxor %xmm6,%xmm4,%xmm4 xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d vpsrld $10,%xmm7,%xmm6 addl 52(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d vpxor %xmm5,%xmm4,%xmm4 xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d vpsrlq $17,%xmm7,%xmm7 addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d vpaddd %xmm4,%xmm3,%xmm3 addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d vpxor %xmm7,%xmm6,%xmm6 shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d vpsrlq $2,%xmm7,%xmm7 addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx vpxor %xmm7,%xmm6,%xmm6 movl %r11d,%r12d xorl %r10d,%r13d shrdl 
$9,%r14d,%r14d vpshufd $132,%xmm6,%xmm6 xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d vpsrldq $8,%xmm6,%xmm6 andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx vpaddd %xmm6,%xmm3,%xmm3 movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d vpshufd $80,%xmm3,%xmm7 xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx vpsrld $10,%xmm7,%xmm6 andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx vpsrlq $17,%xmm7,%xmm7 xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d vpxor %xmm7,%xmm6,%xmm6 addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d vpsrlq $2,%xmm7,%xmm7 shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d vpxor %xmm7,%xmm6,%xmm6 xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d vpshufd $232,%xmm6,%xmm6 shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpslldq $8,%xmm6,%xmm6 vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi vpaddd %xmm6,%xmm3,%xmm3 shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi vpaddd 96(%rbp),%xmm3,%xmm6 shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d vmovdqa %xmm6,48(%rsp) movq 64+0(%rsp),%r12 vpand %xmm14,%xmm11,%xmm11 movq 64+8(%rsp),%r15 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r12,1) leaq 16(%r12),%r12 cmpb $0,131(%rbp) jne .Lavx_00_47 vmovdqu (%r12),%xmm9 movq %r12,64+0(%rsp) shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r8d,%r13d addl 0(%rsp),%r11d movl %eax,%r15d shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d vpxor %xmm8,%xmm9,%xmm9 xorl %edx,%r13d addl 4(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %ecx,%r13d addl 8(%rsp),%r9d movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d addl %esi,%r9d movl %ebx,%r13d addl %r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %ebx,%r13d addl 12(%rsp),%r8d movl %r9d,%esi shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl 
$14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %eax,%r13d addl 16(%rsp),%edx movl %r8d,%r15d shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r11d,%r13d addl 20(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r10d,%r13d addl 24(%rsp),%ebx movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r9d,%r13d addl 28(%rsp),%eax movl %ebx,%esi shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d shrdl $14,%r13d,%r13d movl %r14d,%eax movl %r9d,%r12d xorl %r8d,%r13d shrdl $9,%r14d,%r14d xorl %r10d,%r12d shrdl $5,%r13d,%r13d xorl %eax,%r14d andl %r8d,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r8d,%r13d addl 32(%rsp),%r11d movl %eax,%r15d shrdl $11,%r14d,%r14d xorl %r10d,%r12d xorl %ebx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r11d andl %r15d,%esi xorl %eax,%r14d addl %r13d,%r11d xorl %ebx,%esi addl %r11d,%edx shrdl $2,%r14d,%r14d addl %esi,%r11d movl %edx,%r13d addl %r11d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r11d movl %r8d,%r12d xorl %edx,%r13d shrdl $9,%r14d,%r14d xorl %r9d,%r12d shrdl $5,%r13d,%r13d xorl %r11d,%r14d andl %edx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %edx,%r13d addl 36(%rsp),%r10d movl %r11d,%esi shrdl $11,%r14d,%r14d xorl %r9d,%r12d xorl %eax,%esi shrdl $6,%r13d,%r13d addl %r12d,%r10d andl %esi,%r15d xorl %r11d,%r14d addl %r13d,%r10d xorl %eax,%r15d addl %r10d,%ecx shrdl $2,%r14d,%r14d addl %r15d,%r10d movl %ecx,%r13d addl %r10d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r10d movl %edx,%r12d xorl %ecx,%r13d shrdl $9,%r14d,%r14d xorl %r8d,%r12d shrdl $5,%r13d,%r13d xorl %r10d,%r14d andl %ecx,%r12d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %ecx,%r13d addl 40(%rsp),%r9d movl %r10d,%r15d shrdl $11,%r14d,%r14d xorl %r8d,%r12d xorl %r11d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%r9d andl %r15d,%esi xorl %r10d,%r14d addl %r13d,%r9d xorl %r11d,%esi addl %r9d,%ebx shrdl $2,%r14d,%r14d addl %esi,%r9d movl %ebx,%r13d addl 
%r9d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r9d movl %ecx,%r12d xorl %ebx,%r13d shrdl $9,%r14d,%r14d xorl %edx,%r12d shrdl $5,%r13d,%r13d xorl %r9d,%r14d andl %ebx,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %ebx,%r13d addl 44(%rsp),%r8d movl %r9d,%esi shrdl $11,%r14d,%r14d xorl %edx,%r12d xorl %r10d,%esi shrdl $6,%r13d,%r13d addl %r12d,%r8d andl %esi,%r15d xorl %r9d,%r14d addl %r13d,%r8d xorl %r10d,%r15d addl %r8d,%eax shrdl $2,%r14d,%r14d addl %r15d,%r8d movl %eax,%r13d addl %r8d,%r14d shrdl $14,%r13d,%r13d movl %r14d,%r8d movl %ebx,%r12d xorl %eax,%r13d shrdl $9,%r14d,%r14d xorl %ecx,%r12d shrdl $5,%r13d,%r13d xorl %r8d,%r14d andl %eax,%r12d vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %eax,%r13d addl 48(%rsp),%edx movl %r8d,%r15d shrdl $11,%r14d,%r14d xorl %ecx,%r12d xorl %r9d,%r15d shrdl $6,%r13d,%r13d addl %r12d,%edx andl %r15d,%esi xorl %r8d,%r14d addl %r13d,%edx xorl %r9d,%esi addl %edx,%r11d shrdl $2,%r14d,%r14d addl %esi,%edx movl %r11d,%r13d addl %edx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%edx movl %eax,%r12d xorl %r11d,%r13d shrdl $9,%r14d,%r14d xorl %ebx,%r12d shrdl $5,%r13d,%r13d xorl %edx,%r14d andl %r11d,%r12d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r11d,%r13d addl 52(%rsp),%ecx movl %edx,%esi shrdl $11,%r14d,%r14d xorl %ebx,%r12d xorl %r8d,%esi shrdl $6,%r13d,%r13d addl %r12d,%ecx andl %esi,%r15d xorl %edx,%r14d addl %r13d,%ecx xorl %r8d,%r15d addl %ecx,%r10d shrdl $2,%r14d,%r14d addl %r15d,%ecx movl %r10d,%r13d addl %ecx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ecx movl %r11d,%r12d xorl %r10d,%r13d shrdl $9,%r14d,%r14d xorl %eax,%r12d shrdl $5,%r13d,%r13d xorl %ecx,%r14d andl %r10d,%r12d vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r10d,%r13d addl 56(%rsp),%ebx movl %ecx,%r15d shrdl $11,%r14d,%r14d xorl %eax,%r12d xorl %edx,%r15d shrdl $6,%r13d,%r13d addl %r12d,%ebx andl %r15d,%esi xorl %ecx,%r14d addl %r13d,%ebx xorl %edx,%esi addl %ebx,%r9d shrdl $2,%r14d,%r14d addl %esi,%ebx movl %r9d,%r13d addl %ebx,%r14d shrdl $14,%r13d,%r13d movl %r14d,%ebx movl %r10d,%r12d xorl %r9d,%r13d shrdl $9,%r14d,%r14d xorl %r11d,%r12d shrdl $5,%r13d,%r13d xorl %ebx,%r14d andl %r9d,%r12d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r9d,%r13d addl 60(%rsp),%eax movl %ebx,%esi shrdl $11,%r14d,%r14d xorl %r11d,%r12d xorl %ecx,%esi shrdl $6,%r13d,%r13d addl %r12d,%eax andl %esi,%r15d xorl %ebx,%r14d addl %r13d,%eax xorl %ecx,%r15d addl %eax,%r8d shrdl $2,%r14d,%r14d addl %r15d,%eax movl %r8d,%r13d addl %eax,%r14d movq 64+0(%rsp),%r12 movq 64+8(%rsp),%r13 movq 64+40(%rsp),%r15 movq 64+48(%rsp),%rsi vpand %xmm14,%xmm11,%xmm11 movl %r14d,%eax vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r12),%r12 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d cmpq 64+16(%rsp),%r12 movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) jb .Lloop_avx movq 64+32(%rsp),%r8 movq 120(%rsp),%rsi .cfi_def_cfa %rsi,8 vmovdqu %xmm8,(%r8) vzeroall movq -48(%rsi),%r15 .cfi_restore %r15 movq -40(%rsi),%r14 .cfi_restore %r14 movq -32(%rsi),%r13 .cfi_restore %r13 movq -24(%rsi),%r12 .cfi_restore %r12 movq -16(%rsi),%rbp .cfi_restore %rbp movq -8(%rsi),%rbx .cfi_restore %rbx leaq 
(%rsi),%rsp .cfi_def_cfa_register %rsp .Lepilogue_avx: .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx .type aesni_cbc_sha256_enc_avx2,@function .align 64 aesni_cbc_sha256_enc_avx2: .cfi_startproc .Lavx2_shortcut: movq 8(%rsp),%r10 movq %rsp,%rax .cfi_def_cfa_register %rax pushq %rbx .cfi_offset %rbx,-16 pushq %rbp .cfi_offset %rbp,-24 pushq %r12 .cfi_offset %r12,-32 pushq %r13 .cfi_offset %r13,-40 pushq %r14 .cfi_offset %r14,-48 pushq %r15 .cfi_offset %r15,-56 subq $576,%rsp andq $-1024,%rsp addq $448,%rsp shlq $6,%rdx subq %rdi,%rsi subq %rdi,%r10 addq %rdi,%rdx movq %rdx,64+16(%rsp) movq %r8,64+32(%rsp) movq %r9,64+40(%rsp) movq %r10,64+48(%rsp) movq %rax,120(%rsp) .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08 .Lprologue_avx2: vzeroall movq %rdi,%r13 vpinsrq $1,%rsi,%xmm15,%xmm15 leaq 128(%rcx),%rdi leaq K256+544(%rip),%r12 movl 240-128(%rdi),%r14d movq %r9,%r15 movq %r10,%rsi vmovdqu (%r8),%xmm8 leaq -9(%r14),%r14 vmovdqa 0(%r12,%r14,8),%xmm14 vmovdqa 16(%r12,%r14,8),%xmm13 vmovdqa 32(%r12,%r14,8),%xmm12 subq $-64,%r13 movl 0(%r15),%eax leaq (%rsi,%r13,1),%r12 movl 4(%r15),%ebx cmpq %rdx,%r13 movl 8(%r15),%ecx cmoveq %rsp,%r12 movl 12(%r15),%edx movl 16(%r15),%r8d movl 20(%r15),%r9d movl 24(%r15),%r10d movl 28(%r15),%r11d vmovdqu 0-128(%rdi),%xmm10 jmp .Loop_avx2 .align 16 .Loop_avx2: vmovdqa K256+512(%rip),%ymm7 vmovdqu -64+0(%rsi,%r13,1),%xmm0 vmovdqu -64+16(%rsi,%r13,1),%xmm1 vmovdqu -64+32(%rsi,%r13,1),%xmm2 vmovdqu -64+48(%rsi,%r13,1),%xmm3 vinserti128 $1,(%r12),%ymm0,%ymm0 vinserti128 $1,16(%r12),%ymm1,%ymm1 vpshufb %ymm7,%ymm0,%ymm0 vinserti128 $1,32(%r12),%ymm2,%ymm2 vpshufb %ymm7,%ymm1,%ymm1 vinserti128 $1,48(%r12),%ymm3,%ymm3 leaq K256(%rip),%rbp vpshufb %ymm7,%ymm2,%ymm2 leaq -64(%r13),%r13 vpaddd 0(%rbp),%ymm0,%ymm4 vpshufb %ymm7,%ymm3,%ymm3 vpaddd 32(%rbp),%ymm1,%ymm5 vpaddd 64(%rbp),%ymm2,%ymm6 vpaddd 96(%rbp),%ymm3,%ymm7 vmovdqa %ymm4,0(%rsp) xorl %r14d,%r14d vmovdqa %ymm5,32(%rsp) movq 120(%rsp),%rsi .cfi_def_cfa %rsi,8 leaq -64(%rsp),%rsp movq %rsi,-8(%rsp) .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 movl %ebx,%esi vmovdqa %ymm6,0(%rsp) xorl %ecx,%esi vmovdqa %ymm7,32(%rsp) movl %r9d,%r12d subq $-32*4,%rbp jmp .Lavx2_00_47 .align 16 .Lavx2_00_47: vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 leaq -64(%rsp),%rsp .cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 pushq 64-8(%rsp) .cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 leaq 8(%rsp),%rsp .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm0,%ymm1,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d vpalignr $4,%ymm2,%ymm3,%ymm7 rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d vpsrld $7,%ymm4,%ymm6 andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d vpaddd %ymm7,%ymm0,%ymm0 leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi vpshufd $250,%ymm3,%ymm7 xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d vpsrld $11,%ymm6,%ymm6 addl 4+128(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d vpslld $11,%ymm5,%ymm5 andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%r10,%r12,1),%r10d xorl 
%r14d,%r13d movl %r11d,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d vpaddd %ymm4,%ymm0,%ymm0 xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 8+128(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d vpxor %ymm7,%ymm6,%ymm6 andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d vpshufd $132,%ymm6,%ymm6 leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d vpaddd %ymm6,%ymm0,%ymm0 rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx vpshufd $80,%ymm0,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d vpsrlq $17,%ymm7,%ymm7 addl 12+128(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d vpsrlq $2,%ymm7,%ymm7 andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax vpaddd %ymm6,%ymm0,%ymm0 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d vpaddd 0(%rbp),%ymm0,%ymm6 xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d vmovdqa %ymm6,0(%rsp) vpalignr $4,%ymm1,%ymm2,%ymm4 addl 32+128(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d vpalignr $4,%ymm3,%ymm0,%ymm7 rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx vpsrld $7,%ymm4,%ymm6 andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d vpaddd %ymm7,%ymm1,%ymm1 leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi vpshufd $250,%ymm0,%ymm7 xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d vpsrld $11,%ymm6,%ymm6 addl 36+128(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx vpslld $11,%ymm5,%ymm5 andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d vpaddd %ymm4,%ymm1,%ymm1 xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 40+128(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx vpxor %ymm7,%ymm6,%ymm6 andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl 
$6,%r10d,%r14d vpshufd $132,%ymm6,%ymm6 leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d vpaddd %ymm6,%ymm1,%ymm1 rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d vpshufd $80,%ymm1,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d vpsrlq $17,%ymm7,%ymm7 addl 44+128(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax vpsrlq $2,%ymm7,%ymm7 andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d vpaddd %ymm6,%ymm1,%ymm1 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d vpaddd 32(%rbp),%ymm1,%ymm6 xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) leaq -64(%rsp),%rsp .cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 pushq 64-8(%rsp) .cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 leaq 8(%rsp),%rsp .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm2,%ymm3,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d vpalignr $4,%ymm0,%ymm1,%ymm7 rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d vpsrld $7,%ymm4,%ymm6 andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d vpaddd %ymm7,%ymm2,%ymm2 leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi vpshufd $250,%ymm1,%ymm7 xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d vpsrld $11,%ymm6,%ymm6 addl 4+128(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d vpslld $11,%ymm5,%ymm5 andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d vpaddd %ymm4,%ymm2,%ymm2 xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 8+128(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d vpxor %ymm7,%ymm6,%ymm6 andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d vpshufd $132,%ymm6,%ymm6 leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d vpaddd %ymm6,%ymm2,%ymm2 rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx vpshufd $80,%ymm2,%ymm7 andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi vpsrld $10,%ymm7,%ymm6 
xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d vpsrlq $17,%ymm7,%ymm7 addl 12+128(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d vpsrlq $2,%ymm7,%ymm7 andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi vpslldq $8,%ymm6,%ymm6 rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax vpaddd %ymm6,%ymm2,%ymm2 andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d vpaddd 64(%rbp),%ymm2,%ymm6 xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d vmovdqa %ymm6,0(%rsp) vpalignr $4,%ymm3,%ymm0,%ymm4 addl 32+128(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d vpalignr $4,%ymm1,%ymm2,%ymm7 rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx vpsrld $7,%ymm4,%ymm6 andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d vpaddd %ymm7,%ymm3,%ymm3 leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d vpsrld $3,%ymm4,%ymm7 rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d vpslld $14,%ymm4,%ymm5 rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d vpxor %ymm6,%ymm7,%ymm4 andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi vpshufd $250,%ymm2,%ymm7 xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d vpsrld $11,%ymm6,%ymm6 addl 36+128(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d vpxor %ymm5,%ymm4,%ymm4 rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx vpslld $11,%ymm5,%ymm5 andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d vpxor %ymm6,%ymm4,%ymm4 leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi vpsrld $10,%ymm7,%ymm6 rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi vpxor %ymm5,%ymm4,%ymm4 rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d vpsrlq $17,%ymm7,%ymm7 andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d vpaddd %ymm4,%ymm3,%ymm3 xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d vpxor %ymm7,%ymm6,%ymm6 addl 40+128(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d vpsrlq $2,%ymm7,%ymm7 rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx vpxor %ymm7,%ymm6,%ymm6 andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d vpshufd $132,%ymm6,%ymm6 leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d vpsrldq $8,%ymm6,%ymm6 rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d vpaddd %ymm6,%ymm3,%ymm3 rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d vpshufd $80,%ymm3,%ymm7 andl %r15d,%esi vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi vpsrld $10,%ymm7,%ymm6 xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d vpsrlq $17,%ymm7,%ymm7 addl 44+128(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d vpxor %ymm7,%ymm6,%ymm6 rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax vpsrlq $2,%ymm7,%ymm7 andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d vpxor %ymm7,%ymm6,%ymm6 leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi vpshufd $232,%ymm6,%ymm6 rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi vpslldq $8,%ymm6,%ymm6 rorxl 
$13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d vpaddd %ymm6,%ymm3,%ymm3 andl %esi,%r15d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d vpaddd 96(%rbp),%ymm3,%ymm6 xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) vmovq %xmm15,%r13 vpextrq $1,%xmm15,%r15 vpand %xmm14,%xmm11,%xmm11 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r15,%r13,1) leaq 16(%r13),%r13 leaq 128(%rbp),%rbp cmpb $0,3(%rbp) jne .Lavx2_00_47 vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 addl 0+64(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+64(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+64(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+64(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+64(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+64(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal 
(%r10,%rcx,1),%r10d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+64(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44+64(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d addl 0(%rsp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4(%rsp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8(%rsp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12(%rsp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32(%rsp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl 
%r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36(%rsp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40(%rsp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vpand %xmm13,%xmm11,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 224-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44(%rsp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vpor %xmm11,%xmm8,%xmm8 vaesenclast %xmm10,%xmm9,%xmm11 vmovdqu 0-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d vpextrq $1,%xmm15,%r12 vmovq %xmm15,%r13 movq 552(%rsp),%r15 addl %r14d,%eax leaq 448(%rsp),%rbp vpand %xmm14,%xmm11,%xmm11 vpor %xmm11,%xmm8,%xmm8 vmovdqu %xmm8,(%r12,%r13,1) leaq 16(%r13),%r13 addl 0(%r15),%eax addl 4(%r15),%ebx addl 8(%r15),%ecx addl 12(%r15),%edx addl 16(%r15),%r8d addl 20(%r15),%r9d addl 24(%r15),%r10d addl 28(%r15),%r11d movl %eax,0(%r15) movl %ebx,4(%r15) movl %ecx,8(%r15) movl %edx,12(%r15) movl %r8d,16(%r15) movl %r9d,20(%r15) movl %r10d,24(%r15) movl %r11d,28(%r15) cmpq 80(%rbp),%r13 je .Ldone_avx2 xorl %r14d,%r14d movl %ebx,%esi movl %r9d,%r12d xorl %ecx,%esi jmp .Lower_avx2 .align 16 .Lower_avx2: vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 addl 0+16(%rbp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal (%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vpxor %xmm10,%xmm9,%xmm9 vmovdqu 16-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+16(%rbp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl 
$22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vpxor %xmm8,%xmm9,%xmm9 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+16(%rbp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 32-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+16(%rbp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 48-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+16(%rbp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 64-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+16(%rbp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 80-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+16(%rbp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d leal (%r9,%rbx,1),%r9d andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 96-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %edx,%esi xorl %r13d,%r14d leal (%rbx,%rsi,1),%ebx movl %r10d,%r12d addl 44+16(%rbp),%eax andl %r9d,%r12d rorxl $25,%r9d,%r13d rorxl $11,%r9d,%esi leal (%rbx,%r14,1),%ebx leal (%rax,%r12,1),%eax andnl %r11d,%r9d,%r12d xorl %esi,%r13d rorxl $6,%r9d,%r14d leal (%rax,%r12,1),%eax xorl %r14d,%r13d movl %ebx,%esi rorxl $22,%ebx,%r12d leal (%rax,%r13,1),%eax xorl %ecx,%esi rorxl $13,%ebx,%r14d rorxl $2,%ebx,%r13d leal (%r8,%rax,1),%r8d andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 112-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ecx,%r15d xorl %r13d,%r14d leal (%rax,%r15,1),%eax movl %r9d,%r12d leaq -64(%rbp),%rbp addl 0+16(%rbp),%r11d andl %r8d,%r12d rorxl $25,%r8d,%r13d rorxl $11,%r8d,%r15d leal 
(%rax,%r14,1),%eax leal (%r11,%r12,1),%r11d andnl %r10d,%r8d,%r12d xorl %r15d,%r13d rorxl $6,%r8d,%r14d leal (%r11,%r12,1),%r11d xorl %r14d,%r13d movl %eax,%r15d rorxl $22,%eax,%r12d leal (%r11,%r13,1),%r11d xorl %ebx,%r15d rorxl $13,%eax,%r14d rorxl $2,%eax,%r13d leal (%rdx,%r11,1),%edx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 128-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %ebx,%esi xorl %r13d,%r14d leal (%r11,%rsi,1),%r11d movl %r8d,%r12d addl 4+16(%rbp),%r10d andl %edx,%r12d rorxl $25,%edx,%r13d rorxl $11,%edx,%esi leal (%r11,%r14,1),%r11d leal (%r10,%r12,1),%r10d andnl %r9d,%edx,%r12d xorl %esi,%r13d rorxl $6,%edx,%r14d leal (%r10,%r12,1),%r10d xorl %r14d,%r13d movl %r11d,%esi rorxl $22,%r11d,%r12d leal (%r10,%r13,1),%r10d xorl %eax,%esi rorxl $13,%r11d,%r14d rorxl $2,%r11d,%r13d leal (%rcx,%r10,1),%ecx andl %esi,%r15d vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 144-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %eax,%r15d xorl %r13d,%r14d leal (%r10,%r15,1),%r10d movl %edx,%r12d addl 8+16(%rbp),%r9d andl %ecx,%r12d rorxl $25,%ecx,%r13d rorxl $11,%ecx,%r15d leal (%r10,%r14,1),%r10d leal (%r9,%r12,1),%r9d andnl %r8d,%ecx,%r12d xorl %r15d,%r13d rorxl $6,%ecx,%r14d leal (%r9,%r12,1),%r9d xorl %r14d,%r13d movl %r10d,%r15d rorxl $22,%r10d,%r12d leal (%r9,%r13,1),%r9d xorl %r11d,%r15d rorxl $13,%r10d,%r14d rorxl $2,%r10d,%r13d leal (%rbx,%r9,1),%ebx andl %r15d,%esi vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 160-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r11d,%esi xorl %r13d,%r14d leal (%r9,%rsi,1),%r9d movl %ecx,%r12d addl 12+16(%rbp),%r8d andl %ebx,%r12d rorxl $25,%ebx,%r13d rorxl $11,%ebx,%esi leal (%r9,%r14,1),%r9d leal (%r8,%r12,1),%r8d andnl %edx,%ebx,%r12d xorl %esi,%r13d rorxl $6,%ebx,%r14d leal (%r8,%r12,1),%r8d xorl %r14d,%r13d movl %r9d,%esi rorxl $22,%r9d,%r12d leal (%r8,%r13,1),%r8d xorl %r10d,%esi rorxl $13,%r9d,%r14d rorxl $2,%r9d,%r13d leal (%rax,%r8,1),%eax andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 176-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r10d,%r15d xorl %r13d,%r14d leal (%r8,%r15,1),%r8d movl %ebx,%r12d addl 32+16(%rbp),%edx andl %eax,%r12d rorxl $25,%eax,%r13d rorxl $11,%eax,%r15d leal (%r8,%r14,1),%r8d leal (%rdx,%r12,1),%edx andnl %ecx,%eax,%r12d xorl %r15d,%r13d rorxl $6,%eax,%r14d leal (%rdx,%r12,1),%edx xorl %r14d,%r13d movl %r8d,%r15d rorxl $22,%r8d,%r12d leal (%rdx,%r13,1),%edx xorl %r9d,%r15d rorxl $13,%r8d,%r14d rorxl $2,%r8d,%r13d leal (%r11,%rdx,1),%r11d andl %r15d,%esi vpand %xmm12,%xmm11,%xmm8 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 192-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r9d,%esi xorl %r13d,%r14d leal (%rdx,%rsi,1),%edx movl %eax,%r12d addl 36+16(%rbp),%ecx andl %r11d,%r12d rorxl $25,%r11d,%r13d rorxl $11,%r11d,%esi leal (%rdx,%r14,1),%edx leal (%rcx,%r12,1),%ecx andnl %ebx,%r11d,%r12d xorl %esi,%r13d rorxl $6,%r11d,%r14d leal (%rcx,%r12,1),%ecx xorl %r14d,%r13d movl %edx,%esi rorxl $22,%edx,%r12d leal (%rcx,%r13,1),%ecx xorl %r8d,%esi rorxl $13,%edx,%r14d rorxl $2,%edx,%r13d leal (%r10,%rcx,1),%r10d andl %esi,%r15d vaesenclast %xmm10,%xmm9,%xmm11 vaesenc %xmm10,%xmm9,%xmm9 vmovdqu 208-128(%rdi),%xmm10 xorl %r12d,%r14d xorl %r8d,%r15d xorl %r13d,%r14d leal (%rcx,%r15,1),%ecx movl %r11d,%r12d addl 40+16(%rbp),%ebx andl %r10d,%r12d rorxl $25,%r10d,%r13d rorxl $11,%r10d,%r15d leal (%rcx,%r14,1),%ecx leal (%rbx,%r12,1),%ebx andnl %eax,%r10d,%r12d xorl %r15d,%r13d rorxl $6,%r10d,%r14d leal (%rbx,%r12,1),%ebx xorl %r14d,%r13d movl %ecx,%r15d rorxl $22,%ecx,%r12d leal (%rbx,%r13,1),%ebx xorl %edx,%r15d rorxl $13,%ecx,%r14d rorxl $2,%ecx,%r13d 
leal (%r9,%rbx,1),%r9d
andl %r15d,%esi
vpand %xmm13,%xmm11,%xmm11
vaesenc %xmm10,%xmm9,%xmm9
vmovdqu 224-128(%rdi),%xmm10
xorl %r12d,%r14d
xorl %edx,%esi
xorl %r13d,%r14d
leal (%rbx,%rsi,1),%ebx
movl %r10d,%r12d
addl 44+16(%rbp),%eax
andl %r9d,%r12d
rorxl $25,%r9d,%r13d
rorxl $11,%r9d,%esi
leal (%rbx,%r14,1),%ebx
leal (%rax,%r12,1),%eax
andnl %r11d,%r9d,%r12d
xorl %esi,%r13d
rorxl $6,%r9d,%r14d
leal (%rax,%r12,1),%eax
xorl %r14d,%r13d
movl %ebx,%esi
rorxl $22,%ebx,%r12d
leal (%rax,%r13,1),%eax
xorl %ecx,%esi
rorxl $13,%ebx,%r14d
rorxl $2,%ebx,%r13d
leal (%r8,%rax,1),%r8d
andl %esi,%r15d
vpor %xmm11,%xmm8,%xmm8
vaesenclast %xmm10,%xmm9,%xmm11
vmovdqu 0-128(%rdi),%xmm10
xorl %r12d,%r14d
xorl %ecx,%r15d
xorl %r13d,%r14d
leal (%rax,%r15,1),%eax
movl %r9d,%r12d
vmovq %xmm15,%r13
vpextrq $1,%xmm15,%r15
vpand %xmm14,%xmm11,%xmm11
vpor %xmm11,%xmm8,%xmm8
leaq -64(%rbp),%rbp
vmovdqu %xmm8,(%r15,%r13,1)
leaq 16(%r13),%r13
cmpq %rsp,%rbp
jae .Lower_avx2
movq 552(%rsp),%r15
leaq 64(%r13),%r13
movq 560(%rsp),%rsi
addl %r14d,%eax
leaq 448(%rsp),%rsp
addl 0(%r15),%eax
addl 4(%r15),%ebx
addl 8(%r15),%ecx
addl 12(%r15),%edx
addl 16(%r15),%r8d
addl 20(%r15),%r9d
addl 24(%r15),%r10d
leaq (%rsi,%r13,1),%r12
addl 28(%r15),%r11d
cmpq 64+16(%rsp),%r13
movl %eax,0(%r15)
cmoveq %rsp,%r12
movl %ebx,4(%r15)
movl %ecx,8(%r15)
movl %edx,12(%r15)
movl %r8d,16(%r15)
movl %r9d,20(%r15)
movl %r10d,24(%r15)
movl %r11d,28(%r15)
jbe .Loop_avx2
leaq (%rsp),%rbp
.cfi_escape 0x0f,0x06,0x76,0xf8,0x00,0x06,0x23,0x08
.Ldone_avx2:
movq 64+32(%rbp),%r8
movq 64+56(%rbp),%rsi
.cfi_def_cfa %rsi,8
vmovdqu %xmm8,(%r8)
vzeroall
movq -48(%rsi),%r15
.cfi_restore %r15
movq -40(%rsi),%r14
.cfi_restore %r14
movq -32(%rsi),%r13
.cfi_restore %r13
movq -24(%rsi),%r12
.cfi_restore %r12
movq -16(%rsi),%rbp
.cfi_restore %rbp
movq -8(%rsi),%rbx
.cfi_restore %rbx
leaq (%rsi),%rsp
.cfi_def_cfa_register %rsp
.Lepilogue_avx2:
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
.type aesni_cbc_sha256_enc_shaext,@function
.align 32
aesni_cbc_sha256_enc_shaext:
.cfi_startproc
movq 8(%rsp),%r10
leaq K256+128(%rip),%rax
movdqu (%r9),%xmm1
movdqu 16(%r9),%xmm2
movdqa 512-128(%rax),%xmm3
movl 240(%rcx),%r11d
subq %rdi,%rsi
movups (%rcx),%xmm15
movups (%r8),%xmm6
movups 16(%rcx),%xmm4
leaq 112(%rcx),%rcx
pshufd $0x1b,%xmm1,%xmm0
pshufd $0xb1,%xmm1,%xmm1
pshufd $0x1b,%xmm2,%xmm2
movdqa %xmm3,%xmm7
.byte 102,15,58,15,202,8
punpcklqdq %xmm0,%xmm2
jmp .Loop_shaext
.align 16
.Loop_shaext:
movdqu (%r10),%xmm10
movdqu 16(%r10),%xmm11
movdqu 32(%r10),%xmm12
.byte 102,68,15,56,0,211
movdqu 48(%r10),%xmm13
movdqa 0-128(%rax),%xmm0
paddd %xmm10,%xmm0
.byte 102,68,15,56,0,219
movdqa %xmm2,%xmm9
movdqa %xmm1,%xmm8
movups 0(%rdi),%xmm14
xorps %xmm15,%xmm14
xorps %xmm14,%xmm6
movups -80(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movups -64(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,202
movdqa 32-128(%rax),%xmm0
paddd %xmm11,%xmm0
.byte 102,68,15,56,0,227
leaq 64(%r10),%r10
movups -48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movups -32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,202
movdqa 64-128(%rax),%xmm0
paddd %xmm12,%xmm0
.byte 102,68,15,56,0,235
.byte 69,15,56,204,211
movups -16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm13,%xmm3
.byte 102,65,15,58,15,220,4
paddd %xmm3,%xmm10
movups 0(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,202
movdqa 96-128(%rax),%xmm0
paddd %xmm13,%xmm0
.byte 69,15,56,205,213
.byte 69,15,56,204,220
movups 16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movups 32(%rcx),%xmm4
aesenc %xmm5,%xmm6
movdqa %xmm10,%xmm3
.byte 102,65,15,58,15,221,4
paddd %xmm3,%xmm11
.byte 15,56,203,202
movdqa 128-128(%rax),%xmm0
paddd %xmm10,%xmm0
.byte 69,15,56,205,218
.byte 69,15,56,204,229
movups 48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm11,%xmm3
.byte 102,65,15,58,15,218,4
paddd %xmm3,%xmm12
cmpl $11,%r11d
jb .Laesenclast1
movups 64(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 80(%rcx),%xmm5
aesenc %xmm4,%xmm6
je .Laesenclast1
movups 96(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 112(%rcx),%xmm5
aesenc %xmm4,%xmm6
.Laesenclast1:
aesenclast %xmm5,%xmm6
movups 16-112(%rcx),%xmm4
nop
.byte 15,56,203,202
movups 16(%rdi),%xmm14
xorps %xmm15,%xmm14
movups %xmm6,0(%rsi,%rdi,1)
xorps %xmm14,%xmm6
movups -80(%rcx),%xmm5
aesenc %xmm4,%xmm6
movdqa 160-128(%rax),%xmm0
paddd %xmm11,%xmm0
.byte 69,15,56,205,227
.byte 69,15,56,204,234
movups -64(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm12,%xmm3
.byte 102,65,15,58,15,219,4
paddd %xmm3,%xmm13
movups -48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 192-128(%rax),%xmm0
paddd %xmm12,%xmm0
.byte 69,15,56,205,236
.byte 69,15,56,204,211
movups -32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm13,%xmm3
.byte 102,65,15,58,15,220,4
paddd %xmm3,%xmm10
movups -16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 224-128(%rax),%xmm0
paddd %xmm13,%xmm0
.byte 69,15,56,205,213
.byte 69,15,56,204,220
movups 0(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm10,%xmm3
.byte 102,65,15,58,15,221,4
paddd %xmm3,%xmm11
movups 16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 256-128(%rax),%xmm0
paddd %xmm10,%xmm0
.byte 69,15,56,205,218
.byte 69,15,56,204,229
movups 32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm11,%xmm3
.byte 102,65,15,58,15,218,4
paddd %xmm3,%xmm12
movups 48(%rcx),%xmm5
aesenc %xmm4,%xmm6
cmpl $11,%r11d
jb .Laesenclast2
movups 64(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 80(%rcx),%xmm5
aesenc %xmm4,%xmm6
je .Laesenclast2
movups 96(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 112(%rcx),%xmm5
aesenc %xmm4,%xmm6
.Laesenclast2:
aesenclast %xmm5,%xmm6
movups 16-112(%rcx),%xmm4
nop
.byte 15,56,203,202
movups 32(%rdi),%xmm14
xorps %xmm15,%xmm14
movups %xmm6,16(%rsi,%rdi,1)
xorps %xmm14,%xmm6
movups -80(%rcx),%xmm5
aesenc %xmm4,%xmm6
movdqa 288-128(%rax),%xmm0
paddd %xmm11,%xmm0
.byte 69,15,56,205,227
.byte 69,15,56,204,234
movups -64(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm12,%xmm3
.byte 102,65,15,58,15,219,4
paddd %xmm3,%xmm13
movups -48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 320-128(%rax),%xmm0
paddd %xmm12,%xmm0
.byte 69,15,56,205,236
.byte 69,15,56,204,211
movups -32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm13,%xmm3
.byte 102,65,15,58,15,220,4
paddd %xmm3,%xmm10
movups -16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 352-128(%rax),%xmm0
paddd %xmm13,%xmm0
.byte 69,15,56,205,213
.byte 69,15,56,204,220
movups 0(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm10,%xmm3
.byte 102,65,15,58,15,221,4
paddd %xmm3,%xmm11
movups 16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 384-128(%rax),%xmm0
paddd %xmm10,%xmm0
.byte 69,15,56,205,218
.byte 69,15,56,204,229
movups 32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm11,%xmm3
.byte 102,65,15,58,15,218,4
paddd %xmm3,%xmm12
movups 48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movdqa 416-128(%rax),%xmm0
paddd %xmm11,%xmm0
.byte 69,15,56,205,227
.byte 69,15,56,204,234
cmpl $11,%r11d
jb .Laesenclast3
movups 64(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 80(%rcx),%xmm5
aesenc %xmm4,%xmm6
je .Laesenclast3
movups 96(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 112(%rcx),%xmm5
aesenc %xmm4,%xmm6
.Laesenclast3:
aesenclast %xmm5,%xmm6
movups 16-112(%rcx),%xmm4
nop
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movdqa %xmm12,%xmm3
.byte 102,65,15,58,15,219,4
paddd %xmm3,%xmm13
movups 48(%rdi),%xmm14
xorps %xmm15,%xmm14
movups %xmm6,32(%rsi,%rdi,1)
xorps %xmm14,%xmm6
movups -80(%rcx),%xmm5
aesenc %xmm4,%xmm6
movups -64(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,202
movdqa 448-128(%rax),%xmm0
paddd %xmm12,%xmm0
.byte 69,15,56,205,236
movdqa %xmm7,%xmm3
movups -48(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movups -32(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,202
movdqa 480-128(%rax),%xmm0
paddd %xmm13,%xmm0
movups -16(%rcx),%xmm5
aesenc %xmm4,%xmm6
movups 0(%rcx),%xmm4
aesenc %xmm5,%xmm6
.byte 15,56,203,209
pshufd $0x0e,%xmm0,%xmm0
movups 16(%rcx),%xmm5
aesenc %xmm4,%xmm6
.byte 15,56,203,202
movups 32(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 48(%rcx),%xmm5
aesenc %xmm4,%xmm6
cmpl $11,%r11d
jb .Laesenclast4
movups 64(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 80(%rcx),%xmm5
aesenc %xmm4,%xmm6
je .Laesenclast4
movups 96(%rcx),%xmm4
aesenc %xmm5,%xmm6
movups 112(%rcx),%xmm5
aesenc %xmm4,%xmm6
.Laesenclast4:
aesenclast %xmm5,%xmm6
movups 16-112(%rcx),%xmm4
nop
paddd %xmm9,%xmm2
paddd %xmm8,%xmm1
decq %rdx
movups %xmm6,48(%rsi,%rdi,1)
leaq 64(%rdi),%rdi
jnz .Loop_shaext
pshufd $0xb1,%xmm2,%xmm2
pshufd $0x1b,%xmm1,%xmm3
pshufd $0xb1,%xmm1,%xmm1
punpckhqdq %xmm2,%xmm1
.byte 102,15,58,15,211,8
movups %xmm6,(%r8)
movdqu %xmm1,(%r9)
movdqu %xmm2,16(%r9)
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext
#endif
marvin-hansen/iggy-streaming-system
67,967
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .section .rodata .align 16 one: .quad 1,0 two: .quad 2,0 three: .quad 3,0 four: .quad 4,0 five: .quad 5,0 six: .quad 6,0 seven: .quad 7,0 eight: .quad 8,0 OR_MASK: .long 0x00000000,0x00000000,0x00000000,0x80000000 poly: .quad 0x1, 0xc200000000000000 mask: .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d con1: .long 1,1,1,1 con2: .long 0x1b,0x1b,0x1b,0x1b con3: .byte -1,-1,-1,-1,-1,-1,-1,-1,4,5,6,7,4,5,6,7 and_mask: .long 0,0xffffffff, 0xffffffff, 0xffffffff .text .type GFMUL,@function .align 16 GFMUL: .cfi_startproc vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2 vpclmulqdq $0x11,%xmm1,%xmm0,%xmm5 vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3 vpclmulqdq $0x01,%xmm1,%xmm0,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $8,%xmm3,%xmm4 vpsrldq $8,%xmm3,%xmm3 vpxor %xmm4,%xmm2,%xmm2 vpxor %xmm3,%xmm5,%xmm5 vpclmulqdq $0x10,poly(%rip),%xmm2,%xmm3 vpshufd $78,%xmm2,%xmm4 vpxor %xmm4,%xmm3,%xmm2 vpclmulqdq $0x10,poly(%rip),%xmm2,%xmm3 vpshufd $78,%xmm2,%xmm4 vpxor %xmm4,%xmm3,%xmm2 vpxor %xmm5,%xmm2,%xmm0 .byte 0xf3,0xc3 .cfi_endproc .size GFMUL, .-GFMUL .globl aesgcmsiv_htable_init .hidden aesgcmsiv_htable_init .type aesgcmsiv_htable_init,@function .align 16 aesgcmsiv_htable_init: .cfi_startproc _CET_ENDBR vmovdqa (%rsi),%xmm0 vmovdqa %xmm0,%xmm1 vmovdqa %xmm0,(%rdi) call GFMUL vmovdqa %xmm0,16(%rdi) call GFMUL vmovdqa %xmm0,32(%rdi) call GFMUL vmovdqa %xmm0,48(%rdi) call GFMUL vmovdqa %xmm0,64(%rdi) call GFMUL vmovdqa %xmm0,80(%rdi) call GFMUL vmovdqa %xmm0,96(%rdi) call GFMUL vmovdqa %xmm0,112(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size aesgcmsiv_htable_init, .-aesgcmsiv_htable_init .globl aesgcmsiv_htable6_init .hidden aesgcmsiv_htable6_init .type aesgcmsiv_htable6_init,@function .align 16 aesgcmsiv_htable6_init: .cfi_startproc _CET_ENDBR vmovdqa (%rsi),%xmm0 vmovdqa %xmm0,%xmm1 vmovdqa %xmm0,(%rdi) call GFMUL vmovdqa %xmm0,16(%rdi) call GFMUL vmovdqa %xmm0,32(%rdi) call GFMUL vmovdqa %xmm0,48(%rdi) call GFMUL vmovdqa %xmm0,64(%rdi) call GFMUL vmovdqa %xmm0,80(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size aesgcmsiv_htable6_init, .-aesgcmsiv_htable6_init .globl aesgcmsiv_htable_polyval .hidden aesgcmsiv_htable_polyval .type aesgcmsiv_htable_polyval,@function .align 16 aesgcmsiv_htable_polyval: .cfi_startproc _CET_ENDBR testq %rdx,%rdx jnz .Lhtable_polyval_start .byte 0xf3,0xc3 .Lhtable_polyval_start: vzeroall movq %rdx,%r11 andq $127,%r11 jz .Lhtable_polyval_no_prefix vpxor %xmm9,%xmm9,%xmm9 vmovdqa (%rcx),%xmm1 subq %r11,%rdx subq $16,%r11 vmovdqu (%rsi),%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm5 vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm3 vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm4 vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 leaq 16(%rsi),%rsi testq %r11,%r11 jnz .Lhtable_polyval_prefix_loop jmp .Lhtable_polyval_prefix_complete .align 64 .Lhtable_polyval_prefix_loop: subq $16,%r11 vmovdqu (%rsi),%xmm0 vpclmulqdq $0x00,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x01,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x10,(%rdi,%r11,1),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 testq %r11,%r11 leaq 16(%rsi),%rsi jnz .Lhtable_polyval_prefix_loop .Lhtable_polyval_prefix_complete: vpsrldq $8,%xmm5,%xmm6 vpslldq $8,%xmm5,%xmm5 vpxor %xmm6,%xmm4,%xmm9 vpxor %xmm5,%xmm3,%xmm1 jmp 
.Lhtable_polyval_main_loop .Lhtable_polyval_no_prefix: vpxor %xmm1,%xmm1,%xmm1 vmovdqa (%rcx),%xmm9 .align 64 .Lhtable_polyval_main_loop: subq $0x80,%rdx jb .Lhtable_polyval_out vmovdqu 112(%rsi),%xmm0 vpclmulqdq $0x01,(%rdi),%xmm0,%xmm5 vpclmulqdq $0x00,(%rdi),%xmm0,%xmm3 vpclmulqdq $0x11,(%rdi),%xmm0,%xmm4 vpclmulqdq $0x10,(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 96(%rsi),%xmm0 vpclmulqdq $0x01,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,16(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 80(%rsi),%xmm0 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm7 vpalignr $8,%xmm1,%xmm1,%xmm1 vpclmulqdq $0x01,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,32(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm7,%xmm1,%xmm1 vmovdqu 64(%rsi),%xmm0 vpclmulqdq $0x01,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,48(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 48(%rsi),%xmm0 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm7 vpalignr $8,%xmm1,%xmm1,%xmm1 vpclmulqdq $0x01,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,64(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm7,%xmm1,%xmm1 vmovdqu 32(%rsi),%xmm0 vpclmulqdq $0x01,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,80(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpxor %xmm9,%xmm1,%xmm1 vmovdqu 16(%rsi),%xmm0 vpclmulqdq $0x01,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,96(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vmovdqu 0(%rsi),%xmm0 vpxor %xmm1,%xmm0,%xmm0 vpclmulqdq $0x01,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpclmulqdq $0x00,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm3,%xmm3 vpclmulqdq $0x11,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm4,%xmm4 vpclmulqdq $0x10,112(%rdi),%xmm0,%xmm6 vpxor %xmm6,%xmm5,%xmm5 vpsrldq $8,%xmm5,%xmm6 vpslldq $8,%xmm5,%xmm5 vpxor %xmm6,%xmm4,%xmm9 vpxor %xmm5,%xmm3,%xmm1 leaq 128(%rsi),%rsi jmp .Lhtable_polyval_main_loop .Lhtable_polyval_out: vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm6 vpalignr $8,%xmm1,%xmm1,%xmm1 vpxor %xmm6,%xmm1,%xmm1 vpclmulqdq $0x10,poly(%rip),%xmm1,%xmm6 vpalignr $8,%xmm1,%xmm1,%xmm1 vpxor %xmm6,%xmm1,%xmm1 vpxor %xmm9,%xmm1,%xmm1 vmovdqu %xmm1,(%rcx) vzeroupper .byte 0xf3,0xc3 .cfi_endproc .size aesgcmsiv_htable_polyval,.-aesgcmsiv_htable_polyval .globl aesgcmsiv_polyval_horner .hidden aesgcmsiv_polyval_horner .type aesgcmsiv_polyval_horner,@function .align 16 aesgcmsiv_polyval_horner: .cfi_startproc _CET_ENDBR testq %rcx,%rcx jnz .Lpolyval_horner_start .byte 0xf3,0xc3 .Lpolyval_horner_start: xorq %r10,%r10 shlq $4,%rcx vmovdqa (%rsi),%xmm1 vmovdqa (%rdi),%xmm0 .Lpolyval_horner_loop: vpxor (%rdx,%r10,1),%xmm0,%xmm0 call GFMUL addq $16,%r10 cmpq %r10,%rcx jne .Lpolyval_horner_loop vmovdqa %xmm0,(%rdi) .byte 0xf3,0xc3 .cfi_endproc .size aesgcmsiv_polyval_horner,.-aesgcmsiv_polyval_horner 
.globl aes128gcmsiv_aes_ks .hidden aes128gcmsiv_aes_ks .type aes128gcmsiv_aes_ks,@function .align 16 aes128gcmsiv_aes_ks: .cfi_startproc _CET_ENDBR vmovdqu (%rdi),%xmm1 vmovdqa %xmm1,(%rsi) vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 movq $8,%rax .Lks128_loop: addq $16,%rsi subq $1,%rax vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) jne .Lks128_loop vmovdqa con2(%rip),%xmm0 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,16(%rsi) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslldq $4,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpslldq $4,%xmm3,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,32(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_aes_ks,.-aes128gcmsiv_aes_ks .globl aes256gcmsiv_aes_ks .hidden aes256gcmsiv_aes_ks .type aes256gcmsiv_aes_ks,@function .align 16 aes256gcmsiv_aes_ks: .cfi_startproc _CET_ENDBR vmovdqu (%rdi),%xmm1 vmovdqu 16(%rdi),%xmm3 vmovdqa %xmm1,(%rsi) vmovdqa %xmm3,16(%rsi) vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vpxor %xmm14,%xmm14,%xmm14 movq $6,%rax .Lks256_loop: addq $32,%rsi subq $1,%rax vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpsllq $32,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpshufb con3(%rip),%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vmovdqa %xmm3,16(%rsi) jne .Lks256_loop vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpsllq $32,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm1,32(%rsi) .byte 0xf3,0xc3 .cfi_endproc .globl aes128gcmsiv_aes_ks_enc_x1 .hidden aes128gcmsiv_aes_ks_enc_x1 .type aes128gcmsiv_aes_ks_enc_x1,@function .align 16 aes128gcmsiv_aes_ks_enc_x1: .cfi_startproc _CET_ENDBR vmovdqa (%rcx),%xmm1 vmovdqa 0(%rdi),%xmm4 vmovdqa %xmm1,(%rdx) vpxor %xmm1,%xmm4,%xmm4 vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,16(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,32(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,48(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 
vmovdqa %xmm1,64(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,80(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,96(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,112(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,128(%rdx) vmovdqa con2(%rip),%xmm0 vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,144(%rdx) vpshufb %xmm15,%xmm1,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpsllq $32,%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpshufb con3(%rip),%xmm1,%xmm3 vpxor %xmm3,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenclast %xmm1,%xmm4,%xmm4 vmovdqa %xmm1,160(%rdx) vmovdqa %xmm4,0(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_aes_ks_enc_x1,.-aes128gcmsiv_aes_ks_enc_x1 .globl aes128gcmsiv_kdf .hidden aes128gcmsiv_kdf .type aes128gcmsiv_kdf,@function .align 16 aes128gcmsiv_kdf: .cfi_startproc _CET_ENDBR vmovdqa (%rdx),%xmm1 vmovdqa 0(%rdi),%xmm9 vmovdqa and_mask(%rip),%xmm12 vmovdqa one(%rip),%xmm13 vpshufd $0x90,%xmm9,%xmm9 vpand %xmm12,%xmm9,%xmm9 vpaddd %xmm13,%xmm9,%xmm10 vpaddd %xmm13,%xmm10,%xmm11 vpaddd %xmm13,%xmm11,%xmm12 vpxor %xmm1,%xmm9,%xmm9 vpxor %xmm1,%xmm10,%xmm10 vpxor %xmm1,%xmm11,%xmm11 vpxor %xmm1,%xmm12,%xmm12 vmovdqa 16(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 32(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 48(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 64(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 80(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 96(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 112(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 128(%rdx),%xmm2 vaesenc %xmm2,%xmm9,%xmm9 vaesenc %xmm2,%xmm10,%xmm10 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vmovdqa 144(%rdx),%xmm1 vaesenc %xmm1,%xmm9,%xmm9 vaesenc %xmm1,%xmm10,%xmm10 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vmovdqa 160(%rdx),%xmm2 vaesenclast %xmm2,%xmm9,%xmm9 vaesenclast %xmm2,%xmm10,%xmm10 vaesenclast %xmm2,%xmm11,%xmm11 vaesenclast %xmm2,%xmm12,%xmm12 vmovdqa %xmm9,0(%rsi) vmovdqa %xmm10,16(%rsi) vmovdqa 
%xmm11,32(%rsi) vmovdqa %xmm12,48(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_kdf,.-aes128gcmsiv_kdf .globl aes128gcmsiv_enc_msg_x4 .hidden aes128gcmsiv_enc_msg_x4 .type aes128gcmsiv_enc_msg_x4,@function .align 16 aes128gcmsiv_enc_msg_x4: .cfi_startproc _CET_ENDBR testq %r8,%r8 jnz .L128_enc_msg_x4_start .byte 0xf3,0xc3 .L128_enc_msg_x4_start: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-16 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-24 shrq $4,%r8 movq %r8,%r10 shlq $62,%r10 shrq $62,%r10 vmovdqa (%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 vmovdqu four(%rip),%xmm4 vmovdqa %xmm15,%xmm0 vpaddd one(%rip),%xmm15,%xmm1 vpaddd two(%rip),%xmm15,%xmm2 vpaddd three(%rip),%xmm15,%xmm3 shrq $2,%r8 je .L128_enc_msg_x4_check_remainder subq $64,%rsi subq $64,%rdi .L128_enc_msg_x4_loop1: addq $64,%rsi addq $64,%rdi vmovdqa %xmm0,%xmm5 vmovdqa %xmm1,%xmm6 vmovdqa %xmm2,%xmm7 vmovdqa %xmm3,%xmm8 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm0,%xmm0 vmovdqu 32(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm1,%xmm1 vmovdqu 48(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm2,%xmm2 vmovdqu 64(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm3,%xmm3 vmovdqu 80(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 96(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 112(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 128(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 144(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm12 vaesenclast %xmm12,%xmm5,%xmm5 vaesenclast %xmm12,%xmm6,%xmm6 vaesenclast %xmm12,%xmm7,%xmm7 vaesenclast %xmm12,%xmm8,%xmm8 vpxor 0(%rdi),%xmm5,%xmm5 vpxor 16(%rdi),%xmm6,%xmm6 vpxor 32(%rdi),%xmm7,%xmm7 vpxor 48(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm5,0(%rsi) vmovdqu %xmm6,16(%rsi) vmovdqu %xmm7,32(%rsi) vmovdqu %xmm8,48(%rsi) jne .L128_enc_msg_x4_loop1 addq $64,%rsi addq $64,%rdi .L128_enc_msg_x4_check_remainder: cmpq $0,%r10 je .L128_enc_msg_x4_out .L128_enc_msg_x4_loop2: vmovdqa %xmm0,%xmm5 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm5,%xmm5 vaesenc 16(%rcx),%xmm5,%xmm5 vaesenc 32(%rcx),%xmm5,%xmm5 vaesenc 48(%rcx),%xmm5,%xmm5 vaesenc 64(%rcx),%xmm5,%xmm5 vaesenc 80(%rcx),%xmm5,%xmm5 vaesenc 96(%rcx),%xmm5,%xmm5 vaesenc 112(%rcx),%xmm5,%xmm5 vaesenc 128(%rcx),%xmm5,%xmm5 vaesenc 144(%rcx),%xmm5,%xmm5 vaesenclast 160(%rcx),%xmm5,%xmm5 vpxor (%rdi),%xmm5,%xmm5 vmovdqu %xmm5,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jne .L128_enc_msg_x4_loop2 .L128_enc_msg_x4_out: popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_enc_msg_x4,.-aes128gcmsiv_enc_msg_x4 .globl 
aes128gcmsiv_enc_msg_x8 .hidden aes128gcmsiv_enc_msg_x8 .type aes128gcmsiv_enc_msg_x8,@function .align 16 aes128gcmsiv_enc_msg_x8: .cfi_startproc _CET_ENDBR testq %r8,%r8 jnz .L128_enc_msg_x8_start .byte 0xf3,0xc3 .L128_enc_msg_x8_start: pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-16 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-24 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-32 movq %rsp,%rbp .cfi_def_cfa_register rbp subq $128,%rsp andq $-64,%rsp shrq $4,%r8 movq %r8,%r10 shlq $61,%r10 shrq $61,%r10 vmovdqu (%rdx),%xmm1 vpor OR_MASK(%rip),%xmm1,%xmm1 vpaddd seven(%rip),%xmm1,%xmm0 vmovdqu %xmm0,(%rsp) vpaddd one(%rip),%xmm1,%xmm9 vpaddd two(%rip),%xmm1,%xmm10 vpaddd three(%rip),%xmm1,%xmm11 vpaddd four(%rip),%xmm1,%xmm12 vpaddd five(%rip),%xmm1,%xmm13 vpaddd six(%rip),%xmm1,%xmm14 vmovdqa %xmm1,%xmm0 shrq $3,%r8 je .L128_enc_msg_x8_check_remainder subq $128,%rsi subq $128,%rdi .L128_enc_msg_x8_loop1: addq $128,%rsi addq $128,%rdi vmovdqa %xmm0,%xmm1 vmovdqa %xmm9,%xmm2 vmovdqa %xmm10,%xmm3 vmovdqa %xmm11,%xmm4 vmovdqa %xmm12,%xmm5 vmovdqa %xmm13,%xmm6 vmovdqa %xmm14,%xmm7 vmovdqu (%rsp),%xmm8 vpxor (%rcx),%xmm1,%xmm1 vpxor (%rcx),%xmm2,%xmm2 vpxor (%rcx),%xmm3,%xmm3 vpxor (%rcx),%xmm4,%xmm4 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu (%rsp),%xmm14 vpaddd eight(%rip),%xmm14,%xmm14 vmovdqu %xmm14,(%rsp) vmovdqu 32(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpsubd one(%rip),%xmm14,%xmm14 vmovdqu 48(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm0,%xmm0 vmovdqu 64(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm9,%xmm9 vmovdqu 80(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm10,%xmm10 vmovdqu 96(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm11,%xmm11 vmovdqu 112(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm12,%xmm12 vmovdqu 128(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd 
eight(%rip),%xmm13,%xmm13 vmovdqu 144(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm15 vaesenclast %xmm15,%xmm1,%xmm1 vaesenclast %xmm15,%xmm2,%xmm2 vaesenclast %xmm15,%xmm3,%xmm3 vaesenclast %xmm15,%xmm4,%xmm4 vaesenclast %xmm15,%xmm5,%xmm5 vaesenclast %xmm15,%xmm6,%xmm6 vaesenclast %xmm15,%xmm7,%xmm7 vaesenclast %xmm15,%xmm8,%xmm8 vpxor 0(%rdi),%xmm1,%xmm1 vpxor 16(%rdi),%xmm2,%xmm2 vpxor 32(%rdi),%xmm3,%xmm3 vpxor 48(%rdi),%xmm4,%xmm4 vpxor 64(%rdi),%xmm5,%xmm5 vpxor 80(%rdi),%xmm6,%xmm6 vpxor 96(%rdi),%xmm7,%xmm7 vpxor 112(%rdi),%xmm8,%xmm8 decq %r8 vmovdqu %xmm1,0(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) vmovdqu %xmm8,112(%rsi) jne .L128_enc_msg_x8_loop1 addq $128,%rsi addq $128,%rdi .L128_enc_msg_x8_check_remainder: cmpq $0,%r10 je .L128_enc_msg_x8_out .L128_enc_msg_x8_loop2: vmovdqa %xmm0,%xmm1 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm1,%xmm1 vaesenc 16(%rcx),%xmm1,%xmm1 vaesenc 32(%rcx),%xmm1,%xmm1 vaesenc 48(%rcx),%xmm1,%xmm1 vaesenc 64(%rcx),%xmm1,%xmm1 vaesenc 80(%rcx),%xmm1,%xmm1 vaesenc 96(%rcx),%xmm1,%xmm1 vaesenc 112(%rcx),%xmm1,%xmm1 vaesenc 128(%rcx),%xmm1,%xmm1 vaesenc 144(%rcx),%xmm1,%xmm1 vaesenclast 160(%rcx),%xmm1,%xmm1 vpxor (%rdi),%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $16,%rdi addq $16,%rsi decq %r10 jne .L128_enc_msg_x8_loop2 .L128_enc_msg_x8_out: movq %rbp,%rsp .cfi_def_cfa_register %rsp popq %rbp .cfi_adjust_cfa_offset -8 .cfi_restore %rbp popq %r13 .cfi_adjust_cfa_offset -8 .cfi_restore %r13 popq %r12 .cfi_adjust_cfa_offset -8 .cfi_restore %r12 .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_enc_msg_x8,.-aes128gcmsiv_enc_msg_x8 .globl aes128gcmsiv_dec .hidden aes128gcmsiv_dec .type aes128gcmsiv_dec,@function .align 16 aes128gcmsiv_dec: .cfi_startproc _CET_ENDBR testq $~15,%r9 jnz .L128_dec_start .byte 0xf3,0xc3 .L128_dec_start: vzeroupper vmovdqa (%rdx),%xmm0 vmovdqu 16(%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 movq %rdx,%rax leaq 32(%rax),%rax leaq 32(%rcx),%rcx andq $~15,%r9 cmpq $96,%r9 jb .L128_dec_loop2 subq $96,%r9 vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vpxor (%r8),%xmm7,%xmm7 vpxor (%r8),%xmm8,%xmm8 vpxor (%r8),%xmm9,%xmm9 vpxor (%r8),%xmm10,%xmm10 vpxor (%r8),%xmm11,%xmm11 vpxor (%r8),%xmm12,%xmm12 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc 
%xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vaesenclast %xmm4,%xmm8,%xmm8 vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm4,%xmm10,%xmm10 vaesenclast %xmm4,%xmm11,%xmm11 vaesenclast %xmm4,%xmm12,%xmm12 vpxor 0(%rdi),%xmm7,%xmm7 vpxor 16(%rdi),%xmm8,%xmm8 vpxor 32(%rdi),%xmm9,%xmm9 vpxor 48(%rdi),%xmm10,%xmm10 vpxor 64(%rdi),%xmm11,%xmm11 vpxor 80(%rdi),%xmm12,%xmm12 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) addq $96,%rdi addq $96,%rsi jmp .L128_dec_loop1 .align 64 .L128_dec_loop1: cmpq $96,%r9 jb .L128_dec_finish_96 subq $96,%r9 vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vmovdqa (%r8),%xmm4 vpxor %xmm4,%xmm7,%xmm7 vpxor %xmm4,%xmm8,%xmm8 vpxor %xmm4,%xmm9,%xmm9 vpxor %xmm4,%xmm10,%xmm10 vpxor %xmm4,%xmm11,%xmm11 vpxor %xmm4,%xmm12,%xmm12 vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 64(%r8),%xmm4 vaesenc 
%xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqa 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm6 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor 0(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vpxor 16(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm8,%xmm8 vpxor 32(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm9,%xmm9 vpxor 48(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm10,%xmm10 vpxor 64(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm11,%xmm11 vpxor 80(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm12,%xmm12 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) vpxor %xmm5,%xmm0,%xmm0 leaq 96(%rdi),%rdi leaq 96(%rsi),%rsi jmp .L128_dec_loop1 .L128_dec_finish_96: vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 
vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor %xmm5,%xmm0,%xmm0 .L128_dec_loop2: cmpq $16,%r9 jb .L128_dec_out subq $16,%r9 vmovdqa %xmm15,%xmm2 vpaddd one(%rip),%xmm15,%xmm15 vpxor 0(%r8),%xmm2,%xmm2 vaesenc 16(%r8),%xmm2,%xmm2 vaesenc 32(%r8),%xmm2,%xmm2 vaesenc 48(%r8),%xmm2,%xmm2 vaesenc 64(%r8),%xmm2,%xmm2 vaesenc 80(%r8),%xmm2,%xmm2 vaesenc 96(%r8),%xmm2,%xmm2 vaesenc 112(%r8),%xmm2,%xmm2 vaesenc 128(%r8),%xmm2,%xmm2 vaesenc 144(%r8),%xmm2,%xmm2 vaesenclast 160(%r8),%xmm2,%xmm2 vpxor (%rdi),%xmm2,%xmm2 vmovdqu %xmm2,(%rsi) addq $16,%rdi addq $16,%rsi vpxor %xmm2,%xmm0,%xmm0 vmovdqa -32(%rcx),%xmm1 call GFMUL jmp .L128_dec_loop2 .L128_dec_out: vmovdqu %xmm0,(%rdx) .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_dec, .-aes128gcmsiv_dec .globl aes128gcmsiv_ecb_enc_block .hidden aes128gcmsiv_ecb_enc_block .type aes128gcmsiv_ecb_enc_block,@function .align 16 aes128gcmsiv_ecb_enc_block: .cfi_startproc _CET_ENDBR vmovdqa (%rdi),%xmm1 vpxor (%rdx),%xmm1,%xmm1 vaesenc 16(%rdx),%xmm1,%xmm1 vaesenc 32(%rdx),%xmm1,%xmm1 vaesenc 48(%rdx),%xmm1,%xmm1 vaesenc 64(%rdx),%xmm1,%xmm1 vaesenc 80(%rdx),%xmm1,%xmm1 vaesenc 96(%rdx),%xmm1,%xmm1 vaesenc 112(%rdx),%xmm1,%xmm1 vaesenc 128(%rdx),%xmm1,%xmm1 vaesenc 144(%rdx),%xmm1,%xmm1 vaesenclast 160(%rdx),%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes128gcmsiv_ecb_enc_block,.-aes128gcmsiv_ecb_enc_block .globl aes256gcmsiv_aes_ks_enc_x1 .hidden aes256gcmsiv_aes_ks_enc_x1 .type aes256gcmsiv_aes_ks_enc_x1,@function .align 16 aes256gcmsiv_aes_ks_enc_x1: .cfi_startproc _CET_ENDBR vmovdqa con1(%rip),%xmm0 vmovdqa mask(%rip),%xmm15 vmovdqa (%rdi),%xmm8 vmovdqa (%rcx),%xmm1 vmovdqa 16(%rcx),%xmm3 vpxor %xmm1,%xmm8,%xmm8 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm1,(%rdx) vmovdqu %xmm3,16(%rdx) vpxor %xmm14,%xmm14,%xmm14 vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,32(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,48(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq 
$4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,64(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,80(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,96(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,112(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,128(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,144(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,160(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,176(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslld $1,%xmm0,%xmm0 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenc %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,192(%rdx) vpshufd $0xff,%xmm1,%xmm2 vaesenclast %xmm14,%xmm2,%xmm2 vpslldq $4,%xmm3,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpxor %xmm2,%xmm3,%xmm3 vaesenc %xmm3,%xmm8,%xmm8 vmovdqu %xmm3,208(%rdx) vpshufb %xmm15,%xmm3,%xmm2 vaesenclast %xmm0,%xmm2,%xmm2 vpslldq $4,%xmm1,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpslldq $4,%xmm4,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpxor %xmm2,%xmm1,%xmm1 vaesenclast %xmm1,%xmm8,%xmm8 vmovdqu %xmm1,224(%rdx) vmovdqa %xmm8,(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes256gcmsiv_aes_ks_enc_x1,.-aes256gcmsiv_aes_ks_enc_x1 .globl aes256gcmsiv_ecb_enc_block .hidden aes256gcmsiv_ecb_enc_block .type aes256gcmsiv_ecb_enc_block,@function .align 16 aes256gcmsiv_ecb_enc_block: .cfi_startproc _CET_ENDBR vmovdqa (%rdi),%xmm1 vpxor (%rdx),%xmm1,%xmm1 vaesenc 16(%rdx),%xmm1,%xmm1 vaesenc 32(%rdx),%xmm1,%xmm1 vaesenc 48(%rdx),%xmm1,%xmm1 vaesenc 64(%rdx),%xmm1,%xmm1 vaesenc 80(%rdx),%xmm1,%xmm1 vaesenc 96(%rdx),%xmm1,%xmm1 vaesenc 112(%rdx),%xmm1,%xmm1 
vaesenc 128(%rdx),%xmm1,%xmm1 vaesenc 144(%rdx),%xmm1,%xmm1 vaesenc 160(%rdx),%xmm1,%xmm1 vaesenc 176(%rdx),%xmm1,%xmm1 vaesenc 192(%rdx),%xmm1,%xmm1 vaesenc 208(%rdx),%xmm1,%xmm1 vaesenclast 224(%rdx),%xmm1,%xmm1 vmovdqa %xmm1,(%rsi) .byte 0xf3,0xc3 .cfi_endproc .size aes256gcmsiv_ecb_enc_block,.-aes256gcmsiv_ecb_enc_block .globl aes256gcmsiv_enc_msg_x4 .hidden aes256gcmsiv_enc_msg_x4 .type aes256gcmsiv_enc_msg_x4,@function .align 16 aes256gcmsiv_enc_msg_x4: .cfi_startproc _CET_ENDBR testq %r8,%r8 jnz .L256_enc_msg_x4_start .byte 0xf3,0xc3 .L256_enc_msg_x4_start: movq %r8,%r10 shrq $4,%r8 shlq $60,%r10 jz .L256_enc_msg_x4_start2 addq $1,%r8 .L256_enc_msg_x4_start2: movq %r8,%r10 shlq $62,%r10 shrq $62,%r10 vmovdqa (%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 vmovdqa four(%rip),%xmm4 vmovdqa %xmm15,%xmm0 vpaddd one(%rip),%xmm15,%xmm1 vpaddd two(%rip),%xmm15,%xmm2 vpaddd three(%rip),%xmm15,%xmm3 shrq $2,%r8 je .L256_enc_msg_x4_check_remainder subq $64,%rsi subq $64,%rdi .L256_enc_msg_x4_loop1: addq $64,%rsi addq $64,%rdi vmovdqa %xmm0,%xmm5 vmovdqa %xmm1,%xmm6 vmovdqa %xmm2,%xmm7 vmovdqa %xmm3,%xmm8 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm0,%xmm0 vmovdqu 32(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm1,%xmm1 vmovdqu 48(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm2,%xmm2 vmovdqu 64(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vpaddd %xmm4,%xmm3,%xmm3 vmovdqu 80(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 96(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 112(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 128(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 144(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 176(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 192(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 208(%rcx),%xmm12 vaesenc %xmm12,%xmm5,%xmm5 vaesenc %xmm12,%xmm6,%xmm6 vaesenc %xmm12,%xmm7,%xmm7 vaesenc %xmm12,%xmm8,%xmm8 vmovdqu 224(%rcx),%xmm12 vaesenclast %xmm12,%xmm5,%xmm5 vaesenclast %xmm12,%xmm6,%xmm6 vaesenclast %xmm12,%xmm7,%xmm7 vaesenclast %xmm12,%xmm8,%xmm8 vpxor 0(%rdi),%xmm5,%xmm5 vpxor 16(%rdi),%xmm6,%xmm6 vpxor 32(%rdi),%xmm7,%xmm7 vpxor 48(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm5,0(%rsi) vmovdqu %xmm6,16(%rsi) vmovdqu %xmm7,32(%rsi) vmovdqu %xmm8,48(%rsi) jne .L256_enc_msg_x4_loop1 addq $64,%rsi addq $64,%rdi .L256_enc_msg_x4_check_remainder: cmpq $0,%r10 je .L256_enc_msg_x4_out .L256_enc_msg_x4_loop2: 
vmovdqa %xmm0,%xmm5 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm5,%xmm5 vaesenc 16(%rcx),%xmm5,%xmm5 vaesenc 32(%rcx),%xmm5,%xmm5 vaesenc 48(%rcx),%xmm5,%xmm5 vaesenc 64(%rcx),%xmm5,%xmm5 vaesenc 80(%rcx),%xmm5,%xmm5 vaesenc 96(%rcx),%xmm5,%xmm5 vaesenc 112(%rcx),%xmm5,%xmm5 vaesenc 128(%rcx),%xmm5,%xmm5 vaesenc 144(%rcx),%xmm5,%xmm5 vaesenc 160(%rcx),%xmm5,%xmm5 vaesenc 176(%rcx),%xmm5,%xmm5 vaesenc 192(%rcx),%xmm5,%xmm5 vaesenc 208(%rcx),%xmm5,%xmm5 vaesenclast 224(%rcx),%xmm5,%xmm5 vpxor (%rdi),%xmm5,%xmm5 vmovdqu %xmm5,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jne .L256_enc_msg_x4_loop2 .L256_enc_msg_x4_out: .byte 0xf3,0xc3 .cfi_endproc .size aes256gcmsiv_enc_msg_x4,.-aes256gcmsiv_enc_msg_x4 .globl aes256gcmsiv_enc_msg_x8 .hidden aes256gcmsiv_enc_msg_x8 .type aes256gcmsiv_enc_msg_x8,@function .align 16 aes256gcmsiv_enc_msg_x8: .cfi_startproc _CET_ENDBR testq %r8,%r8 jnz .L256_enc_msg_x8_start .byte 0xf3,0xc3 .L256_enc_msg_x8_start: movq %rsp,%r11 subq $16,%r11 andq $-64,%r11 movq %r8,%r10 shrq $4,%r8 shlq $60,%r10 jz .L256_enc_msg_x8_start2 addq $1,%r8 .L256_enc_msg_x8_start2: movq %r8,%r10 shlq $61,%r10 shrq $61,%r10 vmovdqa (%rdx),%xmm1 vpor OR_MASK(%rip),%xmm1,%xmm1 vpaddd seven(%rip),%xmm1,%xmm0 vmovdqa %xmm0,(%r11) vpaddd one(%rip),%xmm1,%xmm9 vpaddd two(%rip),%xmm1,%xmm10 vpaddd three(%rip),%xmm1,%xmm11 vpaddd four(%rip),%xmm1,%xmm12 vpaddd five(%rip),%xmm1,%xmm13 vpaddd six(%rip),%xmm1,%xmm14 vmovdqa %xmm1,%xmm0 shrq $3,%r8 jz .L256_enc_msg_x8_check_remainder subq $128,%rsi subq $128,%rdi .L256_enc_msg_x8_loop1: addq $128,%rsi addq $128,%rdi vmovdqa %xmm0,%xmm1 vmovdqa %xmm9,%xmm2 vmovdqa %xmm10,%xmm3 vmovdqa %xmm11,%xmm4 vmovdqa %xmm12,%xmm5 vmovdqa %xmm13,%xmm6 vmovdqa %xmm14,%xmm7 vmovdqa (%r11),%xmm8 vpxor (%rcx),%xmm1,%xmm1 vpxor (%rcx),%xmm2,%xmm2 vpxor (%rcx),%xmm3,%xmm3 vpxor (%rcx),%xmm4,%xmm4 vpxor (%rcx),%xmm5,%xmm5 vpxor (%rcx),%xmm6,%xmm6 vpxor (%rcx),%xmm7,%xmm7 vpxor (%rcx),%xmm8,%xmm8 vmovdqu 16(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqa (%r11),%xmm14 vpaddd eight(%rip),%xmm14,%xmm14 vmovdqa %xmm14,(%r11) vmovdqu 32(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpsubd one(%rip),%xmm14,%xmm14 vmovdqu 48(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm0,%xmm0 vmovdqu 64(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm9,%xmm9 vmovdqu 80(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm10,%xmm10 vmovdqu 96(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc 
%xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm11,%xmm11 vmovdqu 112(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm12,%xmm12 vmovdqu 128(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vpaddd eight(%rip),%xmm13,%xmm13 vmovdqu 144(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 160(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 176(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 192(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 208(%rcx),%xmm15 vaesenc %xmm15,%xmm1,%xmm1 vaesenc %xmm15,%xmm2,%xmm2 vaesenc %xmm15,%xmm3,%xmm3 vaesenc %xmm15,%xmm4,%xmm4 vaesenc %xmm15,%xmm5,%xmm5 vaesenc %xmm15,%xmm6,%xmm6 vaesenc %xmm15,%xmm7,%xmm7 vaesenc %xmm15,%xmm8,%xmm8 vmovdqu 224(%rcx),%xmm15 vaesenclast %xmm15,%xmm1,%xmm1 vaesenclast %xmm15,%xmm2,%xmm2 vaesenclast %xmm15,%xmm3,%xmm3 vaesenclast %xmm15,%xmm4,%xmm4 vaesenclast %xmm15,%xmm5,%xmm5 vaesenclast %xmm15,%xmm6,%xmm6 vaesenclast %xmm15,%xmm7,%xmm7 vaesenclast %xmm15,%xmm8,%xmm8 vpxor 0(%rdi),%xmm1,%xmm1 vpxor 16(%rdi),%xmm2,%xmm2 vpxor 32(%rdi),%xmm3,%xmm3 vpxor 48(%rdi),%xmm4,%xmm4 vpxor 64(%rdi),%xmm5,%xmm5 vpxor 80(%rdi),%xmm6,%xmm6 vpxor 96(%rdi),%xmm7,%xmm7 vpxor 112(%rdi),%xmm8,%xmm8 subq $1,%r8 vmovdqu %xmm1,0(%rsi) vmovdqu %xmm2,16(%rsi) vmovdqu %xmm3,32(%rsi) vmovdqu %xmm4,48(%rsi) vmovdqu %xmm5,64(%rsi) vmovdqu %xmm6,80(%rsi) vmovdqu %xmm7,96(%rsi) vmovdqu %xmm8,112(%rsi) jne .L256_enc_msg_x8_loop1 addq $128,%rsi addq $128,%rdi .L256_enc_msg_x8_check_remainder: cmpq $0,%r10 je .L256_enc_msg_x8_out .L256_enc_msg_x8_loop2: vmovdqa %xmm0,%xmm1 vpaddd one(%rip),%xmm0,%xmm0 vpxor (%rcx),%xmm1,%xmm1 vaesenc 16(%rcx),%xmm1,%xmm1 vaesenc 32(%rcx),%xmm1,%xmm1 vaesenc 48(%rcx),%xmm1,%xmm1 vaesenc 64(%rcx),%xmm1,%xmm1 vaesenc 80(%rcx),%xmm1,%xmm1 vaesenc 96(%rcx),%xmm1,%xmm1 vaesenc 112(%rcx),%xmm1,%xmm1 vaesenc 128(%rcx),%xmm1,%xmm1 vaesenc 144(%rcx),%xmm1,%xmm1 vaesenc 160(%rcx),%xmm1,%xmm1 vaesenc 176(%rcx),%xmm1,%xmm1 vaesenc 192(%rcx),%xmm1,%xmm1 vaesenc 208(%rcx),%xmm1,%xmm1 vaesenclast 224(%rcx),%xmm1,%xmm1 vpxor (%rdi),%xmm1,%xmm1 vmovdqu %xmm1,(%rsi) addq $16,%rdi addq $16,%rsi subq $1,%r10 jnz .L256_enc_msg_x8_loop2 .L256_enc_msg_x8_out: .byte 0xf3,0xc3 .cfi_endproc .size aes256gcmsiv_enc_msg_x8,.-aes256gcmsiv_enc_msg_x8 .globl aes256gcmsiv_dec .hidden aes256gcmsiv_dec .type aes256gcmsiv_dec,@function .align 16 aes256gcmsiv_dec: .cfi_startproc _CET_ENDBR testq $~15,%r9 jnz .L256_dec_start 
.byte 0xf3,0xc3 .L256_dec_start: vzeroupper vmovdqa (%rdx),%xmm0 vmovdqu 16(%rdx),%xmm15 vpor OR_MASK(%rip),%xmm15,%xmm15 movq %rdx,%rax leaq 32(%rax),%rax leaq 32(%rcx),%rcx andq $~15,%r9 cmpq $96,%r9 jb .L256_dec_loop2 subq $96,%r9 vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vpxor (%r8),%xmm7,%xmm7 vpxor (%r8),%xmm8,%xmm8 vpxor (%r8),%xmm9,%xmm9 vpxor (%r8),%xmm10,%xmm10 vpxor (%r8),%xmm11,%xmm11 vpxor (%r8),%xmm12,%xmm12 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 128(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 176(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 192(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 208(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 224(%r8),%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vaesenclast %xmm4,%xmm8,%xmm8 vaesenclast %xmm4,%xmm9,%xmm9 vaesenclast %xmm4,%xmm10,%xmm10 vaesenclast %xmm4,%xmm11,%xmm11 vaesenclast %xmm4,%xmm12,%xmm12 vpxor 0(%rdi),%xmm7,%xmm7 vpxor 16(%rdi),%xmm8,%xmm8 vpxor 32(%rdi),%xmm9,%xmm9 vpxor 48(%rdi),%xmm10,%xmm10 vpxor 64(%rdi),%xmm11,%xmm11 vpxor 80(%rdi),%xmm12,%xmm12 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) addq $96,%rdi addq $96,%rsi jmp .L256_dec_loop1 .align 
64 .L256_dec_loop1: cmpq $96,%r9 jb .L256_dec_finish_96 subq $96,%r9 vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqa %xmm15,%xmm7 vpaddd one(%rip),%xmm7,%xmm8 vpaddd two(%rip),%xmm7,%xmm9 vpaddd one(%rip),%xmm9,%xmm10 vpaddd two(%rip),%xmm9,%xmm11 vpaddd one(%rip),%xmm11,%xmm12 vpaddd two(%rip),%xmm11,%xmm15 vmovdqa (%r8),%xmm4 vpxor %xmm4,%xmm7,%xmm7 vpxor %xmm4,%xmm8,%xmm8 vpxor %xmm4,%xmm9,%xmm9 vpxor %xmm4,%xmm10,%xmm10 vpxor %xmm4,%xmm11,%xmm11 vpxor %xmm4,%xmm12,%xmm12 vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 48(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 64(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 96(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 112(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqa 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 80-32(%rcx),%xmm5 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 128(%r8),%xmm4 
vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vmovdqu 144(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 160(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 176(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 192(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 208(%r8),%xmm4 vaesenc %xmm4,%xmm7,%xmm7 vaesenc %xmm4,%xmm8,%xmm8 vaesenc %xmm4,%xmm9,%xmm9 vaesenc %xmm4,%xmm10,%xmm10 vaesenc %xmm4,%xmm11,%xmm11 vaesenc %xmm4,%xmm12,%xmm12 vmovdqu 224(%r8),%xmm6 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor 0(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm7,%xmm7 vpxor 16(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm8,%xmm8 vpxor 32(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm9,%xmm9 vpxor 48(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm10,%xmm10 vpxor 64(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm11,%xmm11 vpxor 80(%rdi),%xmm6,%xmm4 vaesenclast %xmm4,%xmm12,%xmm12 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vmovdqu %xmm7,0(%rsi) vmovdqu %xmm8,16(%rsi) vmovdqu %xmm9,32(%rsi) vmovdqu %xmm10,48(%rsi) vmovdqu %xmm11,64(%rsi) vmovdqu %xmm12,80(%rsi) vpxor %xmm5,%xmm0,%xmm0 leaq 96(%rdi),%rdi leaq 96(%rsi),%rsi jmp .L256_dec_loop1 .L256_dec_finish_96: vmovdqa %xmm12,%xmm6 vmovdqa %xmm11,16-32(%rax) vmovdqa %xmm10,32-32(%rax) vmovdqa %xmm9,48-32(%rax) vmovdqa %xmm8,64-32(%rax) vmovdqa %xmm7,80-32(%rax) vmovdqu 0-32(%rcx),%xmm4 vpclmulqdq $0x10,%xmm4,%xmm6,%xmm1 vpclmulqdq $0x11,%xmm4,%xmm6,%xmm2 vpclmulqdq $0x00,%xmm4,%xmm6,%xmm3 vpclmulqdq $0x01,%xmm4,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu -16(%rax),%xmm6 vmovdqu -16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 0(%rax),%xmm6 vmovdqu 0(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 16(%rax),%xmm6 vmovdqu 16(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 32(%rax),%xmm6 vmovdqu 32(%rcx),%xmm13 vpclmulqdq $0x10,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x11,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x01,%xmm13,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vmovdqu 80-32(%rax),%xmm6 vpxor %xmm0,%xmm6,%xmm6 vmovdqu 
80-32(%rcx),%xmm5 vpclmulqdq $0x11,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm2,%xmm2 vpclmulqdq $0x00,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm3,%xmm3 vpclmulqdq $0x10,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpclmulqdq $0x01,%xmm5,%xmm6,%xmm4 vpxor %xmm4,%xmm1,%xmm1 vpsrldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm2,%xmm5 vpslldq $8,%xmm1,%xmm4 vpxor %xmm4,%xmm3,%xmm0 vmovdqa poly(%rip),%xmm3 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpalignr $8,%xmm0,%xmm0,%xmm2 vpclmulqdq $0x10,%xmm3,%xmm0,%xmm0 vpxor %xmm0,%xmm2,%xmm0 vpxor %xmm5,%xmm0,%xmm0 .L256_dec_loop2: cmpq $16,%r9 jb .L256_dec_out subq $16,%r9 vmovdqa %xmm15,%xmm2 vpaddd one(%rip),%xmm15,%xmm15 vpxor 0(%r8),%xmm2,%xmm2 vaesenc 16(%r8),%xmm2,%xmm2 vaesenc 32(%r8),%xmm2,%xmm2 vaesenc 48(%r8),%xmm2,%xmm2 vaesenc 64(%r8),%xmm2,%xmm2 vaesenc 80(%r8),%xmm2,%xmm2 vaesenc 96(%r8),%xmm2,%xmm2 vaesenc 112(%r8),%xmm2,%xmm2 vaesenc 128(%r8),%xmm2,%xmm2 vaesenc 144(%r8),%xmm2,%xmm2 vaesenc 160(%r8),%xmm2,%xmm2 vaesenc 176(%r8),%xmm2,%xmm2 vaesenc 192(%r8),%xmm2,%xmm2 vaesenc 208(%r8),%xmm2,%xmm2 vaesenclast 224(%r8),%xmm2,%xmm2 vpxor (%rdi),%xmm2,%xmm2 vmovdqu %xmm2,(%rsi) addq $16,%rdi addq $16,%rsi vpxor %xmm2,%xmm0,%xmm0 vmovdqa -32(%rcx),%xmm1 call GFMUL jmp .L256_dec_loop2 .L256_dec_out: vmovdqu %xmm0,(%rdx) .byte 0xf3,0xc3 .cfi_endproc .size aes256gcmsiv_dec, .-aes256gcmsiv_dec .globl aes256gcmsiv_kdf .hidden aes256gcmsiv_kdf .type aes256gcmsiv_kdf,@function .align 16 aes256gcmsiv_kdf: .cfi_startproc _CET_ENDBR vmovdqa (%rdx),%xmm1 vmovdqa 0(%rdi),%xmm4 vmovdqa and_mask(%rip),%xmm11 vmovdqa one(%rip),%xmm8 vpshufd $0x90,%xmm4,%xmm4 vpand %xmm11,%xmm4,%xmm4 vpaddd %xmm8,%xmm4,%xmm6 vpaddd %xmm8,%xmm6,%xmm7 vpaddd %xmm8,%xmm7,%xmm11 vpaddd %xmm8,%xmm11,%xmm12 vpaddd %xmm8,%xmm12,%xmm13 vpxor %xmm1,%xmm4,%xmm4 vpxor %xmm1,%xmm6,%xmm6 vpxor %xmm1,%xmm7,%xmm7 vpxor %xmm1,%xmm11,%xmm11 vpxor %xmm1,%xmm12,%xmm12 vpxor %xmm1,%xmm13,%xmm13 vmovdqa 16(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 32(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 48(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 64(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 80(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 96(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 112(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 vaesenc %xmm1,%xmm12,%xmm12 vaesenc %xmm1,%xmm13,%xmm13 vmovdqa 128(%rdx),%xmm2 vaesenc %xmm2,%xmm4,%xmm4 vaesenc %xmm2,%xmm6,%xmm6 vaesenc %xmm2,%xmm7,%xmm7 vaesenc %xmm2,%xmm11,%xmm11 vaesenc %xmm2,%xmm12,%xmm12 vaesenc %xmm2,%xmm13,%xmm13 vmovdqa 144(%rdx),%xmm1 vaesenc %xmm1,%xmm4,%xmm4 vaesenc %xmm1,%xmm6,%xmm6 vaesenc %xmm1,%xmm7,%xmm7 vaesenc %xmm1,%xmm11,%xmm11 
vaesenc %xmm1,%xmm12,%xmm12
vaesenc %xmm1,%xmm13,%xmm13
vmovdqa 160(%rdx),%xmm2
vaesenc %xmm2,%xmm4,%xmm4
vaesenc %xmm2,%xmm6,%xmm6
vaesenc %xmm2,%xmm7,%xmm7
vaesenc %xmm2,%xmm11,%xmm11
vaesenc %xmm2,%xmm12,%xmm12
vaesenc %xmm2,%xmm13,%xmm13
vmovdqa 176(%rdx),%xmm1
vaesenc %xmm1,%xmm4,%xmm4
vaesenc %xmm1,%xmm6,%xmm6
vaesenc %xmm1,%xmm7,%xmm7
vaesenc %xmm1,%xmm11,%xmm11
vaesenc %xmm1,%xmm12,%xmm12
vaesenc %xmm1,%xmm13,%xmm13
vmovdqa 192(%rdx),%xmm2
vaesenc %xmm2,%xmm4,%xmm4
vaesenc %xmm2,%xmm6,%xmm6
vaesenc %xmm2,%xmm7,%xmm7
vaesenc %xmm2,%xmm11,%xmm11
vaesenc %xmm2,%xmm12,%xmm12
vaesenc %xmm2,%xmm13,%xmm13
vmovdqa 208(%rdx),%xmm1
vaesenc %xmm1,%xmm4,%xmm4
vaesenc %xmm1,%xmm6,%xmm6
vaesenc %xmm1,%xmm7,%xmm7
vaesenc %xmm1,%xmm11,%xmm11
vaesenc %xmm1,%xmm12,%xmm12
vaesenc %xmm1,%xmm13,%xmm13
vmovdqa 224(%rdx),%xmm2
vaesenclast %xmm2,%xmm4,%xmm4
vaesenclast %xmm2,%xmm6,%xmm6
vaesenclast %xmm2,%xmm7,%xmm7
vaesenclast %xmm2,%xmm11,%xmm11
vaesenclast %xmm2,%xmm12,%xmm12
vaesenclast %xmm2,%xmm13,%xmm13
vmovdqa %xmm4,0(%rsi)
vmovdqa %xmm6,16(%rsi)
vmovdqa %xmm7,32(%rsi)
vmovdqa %xmm11,48(%rsi)
vmovdqa %xmm12,64(%rsi)
vmovdqa %xmm13,80(%rsi)
.byte 0xf3,0xc3
.cfi_endproc
.size aes256gcmsiv_kdf, .-aes256gcmsiv_kdf
#endif
marvin-hansen/iggy-streaming-system
58,628
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/linux-x86_64/crypto/cipher_extra/aesni-sha1-x86_64.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86_64) && defined(__ELF__) .text .extern OPENSSL_ia32cap_P .hidden OPENSSL_ia32cap_P .globl aesni_cbc_sha1_enc .hidden aesni_cbc_sha1_enc .type aesni_cbc_sha1_enc,@function .align 32 aesni_cbc_sha1_enc: .cfi_startproc movl OPENSSL_ia32cap_P+0(%rip),%r10d movq OPENSSL_ia32cap_P+4(%rip),%r11 btq $61,%r11 jc aesni_cbc_sha1_enc_shaext andl $268435456,%r11d andl $1073741824,%r10d orl %r11d,%r10d cmpl $1342177280,%r10d je aesni_cbc_sha1_enc_avx jmp aesni_cbc_sha1_enc_ssse3 .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc .type aesni_cbc_sha1_enc_ssse3,@function .align 32 aesni_cbc_sha1_enc_ssse3: .cfi_startproc movq 8(%rsp),%r10 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 leaq -104(%rsp),%rsp .cfi_adjust_cfa_offset 104 movq %rdi,%r12 movq %rsi,%r13 movq %rdx,%r14 leaq 112(%rcx),%r15 movdqu (%r8),%xmm2 movq %r8,88(%rsp) shlq $6,%r14 subq %r12,%r13 movl 240-112(%r15),%r8d addq %r10,%r14 leaq K_XX_XX(%rip),%r11 movl 0(%r9),%eax movl 4(%r9),%ebx movl 8(%r9),%ecx movl 12(%r9),%edx movl %ebx,%esi movl 16(%r9),%ebp movl %ecx,%edi xorl %edx,%edi andl %edi,%esi movdqa 64(%r11),%xmm3 movdqa 0(%r11),%xmm13 movdqu 0(%r10),%xmm4 movdqu 16(%r10),%xmm5 movdqu 32(%r10),%xmm6 movdqu 48(%r10),%xmm7 .byte 102,15,56,0,227 .byte 102,15,56,0,235 .byte 102,15,56,0,243 addq $64,%r10 paddd %xmm13,%xmm4 .byte 102,15,56,0,251 paddd %xmm13,%xmm5 paddd %xmm13,%xmm6 movdqa %xmm4,0(%rsp) psubd %xmm13,%xmm4 movdqa %xmm5,16(%rsp) psubd %xmm13,%xmm5 movdqa %xmm6,32(%rsp) psubd %xmm13,%xmm6 movups -112(%r15),%xmm15 movups 16-112(%r15),%xmm0 jmp .Loop_ssse3 .align 32 .Loop_ssse3: rorl $2,%ebx movups 0(%r12),%xmm14 xorps %xmm15,%xmm14 xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 pshufd $238,%xmm4,%xmm8 xorl %edx,%esi movdqa %xmm7,%xmm12 paddd %xmm7,%xmm13 movl %eax,%edi addl 0(%rsp),%ebp punpcklqdq %xmm5,%xmm8 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp psrldq $4,%xmm12 andl %ebx,%edi xorl %ecx,%ebx pxor %xmm4,%xmm8 addl %eax,%ebp rorl $7,%eax pxor %xmm6,%xmm12 xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx pxor %xmm12,%xmm8 xorl %ebx,%eax roll $5,%ebp movdqa %xmm13,48(%rsp) addl %edi,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 andl %eax,%esi movdqa %xmm8,%xmm3 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp movdqa %xmm8,%xmm12 xorl %ebx,%esi pslldq $12,%xmm3 paddd %xmm8,%xmm8 movl %edx,%edi addl 8(%rsp),%ecx psrld $31,%xmm12 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx movdqa %xmm3,%xmm13 andl %ebp,%edi xorl %eax,%ebp psrld $30,%xmm3 addl %edx,%ecx rorl $7,%edx por %xmm12,%xmm8 xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx movups -48(%r15),%xmm1 .byte 102,15,56,220,208 pslld $2,%xmm13 pxor %xmm3,%xmm8 xorl %ebp,%edx movdqa 0(%r11),%xmm3 roll $5,%ecx addl %edi,%ebx andl %edx,%esi pxor %xmm13,%xmm8 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx pshufd $238,%xmm5,%xmm9 xorl %ebp,%esi movdqa %xmm8,%xmm13 paddd %xmm8,%xmm3 movl %ebx,%edi addl 16(%rsp),%eax punpcklqdq %xmm6,%xmm9 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax psrldq $4,%xmm13 andl %ecx,%edi xorl %edx,%ecx pxor %xmm5,%xmm9 addl %ebx,%eax rorl 
$7,%ebx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 pxor %xmm7,%xmm13 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp pxor %xmm13,%xmm9 xorl %ecx,%ebx roll $5,%eax movdqa %xmm3,0(%rsp) addl %edi,%ebp andl %ebx,%esi movdqa %xmm9,%xmm12 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax movdqa %xmm9,%xmm13 xorl %ecx,%esi pslldq $12,%xmm12 paddd %xmm9,%xmm9 movl %ebp,%edi addl 24(%rsp),%edx psrld $31,%xmm13 xorl %ebx,%eax roll $5,%ebp addl %esi,%edx movups -16(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm12,%xmm3 andl %eax,%edi xorl %ebx,%eax psrld $30,%xmm12 addl %ebp,%edx rorl $7,%ebp por %xmm13,%xmm9 xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx pslld $2,%xmm3 pxor %xmm12,%xmm9 xorl %eax,%ebp movdqa 16(%r11),%xmm12 roll $5,%edx addl %edi,%ecx andl %ebp,%esi pxor %xmm3,%xmm9 xorl %eax,%ebp addl %edx,%ecx rorl $7,%edx pshufd $238,%xmm6,%xmm10 xorl %eax,%esi movdqa %xmm9,%xmm3 paddd %xmm9,%xmm12 movl %ecx,%edi addl 32(%rsp),%ebx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 punpcklqdq %xmm7,%xmm10 xorl %ebp,%edx roll $5,%ecx addl %esi,%ebx psrldq $4,%xmm3 andl %edx,%edi xorl %ebp,%edx pxor %xmm6,%xmm10 addl %ecx,%ebx rorl $7,%ecx pxor %xmm8,%xmm3 xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax pxor %xmm3,%xmm10 xorl %edx,%ecx roll $5,%ebx movdqa %xmm12,16(%rsp) addl %edi,%eax andl %ecx,%esi movdqa %xmm10,%xmm13 xorl %edx,%ecx addl %ebx,%eax rorl $7,%ebx movups 16(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm10,%xmm3 xorl %edx,%esi pslldq $12,%xmm13 paddd %xmm10,%xmm10 movl %eax,%edi addl 40(%rsp),%ebp psrld $31,%xmm3 xorl %ecx,%ebx roll $5,%eax addl %esi,%ebp movdqa %xmm13,%xmm12 andl %ebx,%edi xorl %ecx,%ebx psrld $30,%xmm13 addl %eax,%ebp rorl $7,%eax por %xmm3,%xmm10 xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx pslld $2,%xmm12 pxor %xmm13,%xmm10 xorl %ebx,%eax movdqa 16(%r11),%xmm13 roll $5,%ebp addl %edi,%edx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 andl %eax,%esi pxor %xmm12,%xmm10 xorl %ebx,%eax addl %ebp,%edx rorl $7,%ebp pshufd $238,%xmm7,%xmm11 xorl %ebx,%esi movdqa %xmm10,%xmm12 paddd %xmm10,%xmm13 movl %edx,%edi addl 48(%rsp),%ecx punpcklqdq %xmm8,%xmm11 xorl %eax,%ebp roll $5,%edx addl %esi,%ecx psrldq $4,%xmm12 andl %ebp,%edi xorl %eax,%ebp pxor %xmm7,%xmm11 addl %edx,%ecx rorl $7,%edx pxor %xmm9,%xmm12 xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx movups 48(%r15),%xmm1 .byte 102,15,56,220,208 pxor %xmm12,%xmm11 xorl %ebp,%edx roll $5,%ecx movdqa %xmm13,32(%rsp) addl %edi,%ebx andl %edx,%esi movdqa %xmm11,%xmm3 xorl %ebp,%edx addl %ecx,%ebx rorl $7,%ecx movdqa %xmm11,%xmm12 xorl %ebp,%esi pslldq $12,%xmm3 paddd %xmm11,%xmm11 movl %ebx,%edi addl 56(%rsp),%eax psrld $31,%xmm12 xorl %edx,%ecx roll $5,%ebx addl %esi,%eax movdqa %xmm3,%xmm13 andl %ecx,%edi xorl %edx,%ecx psrld $30,%xmm3 addl %ebx,%eax rorl $7,%ebx cmpl $11,%r8d jb .Laesenclast1 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je .Laesenclast1 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 .Laesenclast1: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 por %xmm12,%xmm11 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp pslld $2,%xmm13 pxor %xmm3,%xmm11 xorl %ecx,%ebx movdqa 16(%r11),%xmm3 roll $5,%eax addl %edi,%ebp andl %ebx,%esi pxor %xmm13,%xmm11 pshufd $238,%xmm10,%xmm13 xorl %ecx,%ebx addl %eax,%ebp rorl $7,%eax pxor %xmm8,%xmm4 xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx punpcklqdq %xmm11,%xmm13 xorl %ebx,%eax roll $5,%ebp pxor %xmm5,%xmm4 addl %esi,%edx movups 16(%r12),%xmm14 xorps %xmm15,%xmm14 movups 
%xmm2,0(%r12,%r13,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 andl %eax,%edi movdqa %xmm3,%xmm12 xorl %ebx,%eax paddd %xmm11,%xmm3 addl %ebp,%edx pxor %xmm13,%xmm4 rorl $7,%ebp xorl %ebx,%edi movl %edx,%esi addl 4(%rsp),%ecx movdqa %xmm4,%xmm13 xorl %eax,%ebp roll $5,%edx movdqa %xmm3,48(%rsp) addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp pslld $2,%xmm4 addl %edx,%ecx rorl $7,%edx psrld $30,%xmm13 xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 por %xmm13,%xmm4 xorl %ebp,%edx roll $5,%ecx pshufd $238,%xmm11,%xmm3 addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax pxor %xmm9,%xmm5 addl 16(%rsp),%ebp movups -48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi punpcklqdq %xmm4,%xmm3 movl %eax,%edi roll $5,%eax pxor %xmm6,%xmm5 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm12,%xmm13 rorl $7,%ebx paddd %xmm4,%xmm12 addl %eax,%ebp pxor %xmm3,%xmm5 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm5,%xmm3 addl %edi,%edx xorl %ebx,%esi movdqa %xmm12,0(%rsp) rorl $7,%eax addl %ebp,%edx addl 24(%rsp),%ecx pslld $2,%xmm5 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm3 roll $5,%edx addl %esi,%ecx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp por %xmm3,%xmm5 addl %edx,%ecx addl 28(%rsp),%ebx pshufd $238,%xmm4,%xmm12 xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx pxor %xmm10,%xmm6 addl 32(%rsp),%eax xorl %edx,%esi punpcklqdq %xmm5,%xmm12 movl %ebx,%edi roll $5,%ebx pxor %xmm7,%xmm6 addl %esi,%eax xorl %edx,%edi movdqa 32(%r11),%xmm3 rorl $7,%ecx paddd %xmm5,%xmm13 addl %ebx,%eax pxor %xmm12,%xmm6 addl 36(%rsp),%ebp movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax movdqa %xmm6,%xmm12 addl %edi,%ebp xorl %ecx,%esi movdqa %xmm13,16(%rsp) rorl $7,%ebx addl %eax,%ebp addl 40(%rsp),%edx pslld $2,%xmm6 xorl %ebx,%esi movl %ebp,%edi psrld $30,%xmm12 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax por %xmm12,%xmm6 addl %ebp,%edx addl 44(%rsp),%ecx pshufd $238,%xmm5,%xmm13 xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx pxor %xmm11,%xmm7 addl 48(%rsp),%ebx xorl %ebp,%esi punpcklqdq %xmm6,%xmm13 movl %ecx,%edi roll $5,%ecx pxor %xmm8,%xmm7 addl %esi,%ebx xorl %ebp,%edi movdqa %xmm3,%xmm12 rorl $7,%edx paddd %xmm6,%xmm3 addl %ecx,%ebx pxor %xmm13,%xmm7 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx movdqa %xmm7,%xmm13 addl %edi,%eax xorl %edx,%esi movdqa %xmm3,32(%rsp) rorl $7,%ecx addl %ebx,%eax addl 56(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 pslld $2,%xmm7 xorl %ecx,%esi movl %eax,%edi psrld $30,%xmm13 roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx por %xmm13,%xmm7 addl %eax,%ebp addl 60(%rsp),%edx pshufd $238,%xmm6,%xmm3 xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx pxor %xmm4,%xmm8 addl 0(%rsp),%ecx xorl %eax,%esi punpcklqdq %xmm7,%xmm3 movl %edx,%edi roll $5,%edx pxor %xmm9,%xmm8 addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi movdqa %xmm12,%xmm13 rorl $7,%ebp paddd %xmm7,%xmm12 addl %edx,%ecx pxor %xmm3,%xmm8 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx movdqa %xmm8,%xmm3 addl %edi,%ebx xorl %ebp,%esi movdqa %xmm12,48(%rsp) rorl 
$7,%edx addl %ecx,%ebx addl 8(%rsp),%eax pslld $2,%xmm8 xorl %edx,%esi movl %ebx,%edi psrld $30,%xmm3 roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx por %xmm3,%xmm8 addl %ebx,%eax addl 12(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 pshufd $238,%xmm7,%xmm12 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp pxor %xmm5,%xmm9 addl 16(%rsp),%edx xorl %ebx,%esi punpcklqdq %xmm8,%xmm12 movl %ebp,%edi roll $5,%ebp pxor %xmm10,%xmm9 addl %esi,%edx xorl %ebx,%edi movdqa %xmm13,%xmm3 rorl $7,%eax paddd %xmm8,%xmm13 addl %ebp,%edx pxor %xmm12,%xmm9 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx movdqa %xmm9,%xmm12 addl %edi,%ecx cmpl $11,%r8d jb .Laesenclast2 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je .Laesenclast2 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 .Laesenclast2: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi movdqa %xmm13,0(%rsp) rorl $7,%ebp addl %edx,%ecx addl 24(%rsp),%ebx pslld $2,%xmm9 xorl %ebp,%esi movl %ecx,%edi psrld $30,%xmm12 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx por %xmm12,%xmm9 addl %ecx,%ebx addl 28(%rsp),%eax pshufd $238,%xmm8,%xmm13 rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax pxor %xmm6,%xmm10 addl 32(%rsp),%ebp movups 32(%r12),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,16(%r13,%r12,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx punpcklqdq %xmm9,%xmm13 movl %eax,%edi xorl %ecx,%esi pxor %xmm11,%xmm10 roll $5,%eax addl %esi,%ebp movdqa %xmm3,%xmm12 xorl %ebx,%edi paddd %xmm9,%xmm3 xorl %ecx,%ebx pxor %xmm13,%xmm10 addl %eax,%ebp addl 36(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movdqa %xmm10,%xmm13 movl %ebp,%esi xorl %ebx,%edi movdqa %xmm3,16(%rsp) roll $5,%ebp addl %edi,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi pslld $2,%xmm10 xorl %ebx,%eax addl %ebp,%edx psrld $30,%xmm13 addl 40(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax por %xmm13,%xmm10 rorl $7,%ebp movl %edx,%edi xorl %eax,%esi roll $5,%edx pshufd $238,%xmm9,%xmm3 addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movups -48(%r15),%xmm1 .byte 102,15,56,220,208 movl %ecx,%esi xorl %ebp,%edi roll $5,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx pxor %xmm7,%xmm11 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx rorl $7,%ecx punpcklqdq %xmm10,%xmm3 movl %ebx,%edi xorl %edx,%esi pxor %xmm4,%xmm11 roll $5,%ebx addl %esi,%eax movdqa 48(%r11),%xmm13 xorl %ecx,%edi paddd %xmm10,%xmm12 xorl %edx,%ecx pxor %xmm3,%xmm11 addl %ebx,%eax addl 52(%rsp),%ebp movups -32(%r15),%xmm0 .byte 102,15,56,220,209 andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movdqa %xmm11,%xmm3 movl %eax,%esi xorl %ecx,%edi movdqa %xmm12,32(%rsp) roll $5,%eax addl %edi,%ebp xorl %ebx,%esi pslld $2,%xmm11 xorl %ecx,%ebx addl %eax,%ebp psrld $30,%xmm3 addl 56(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx por %xmm3,%xmm11 rorl $7,%eax movl %ebp,%edi xorl %ebx,%esi roll $5,%ebp pshufd $238,%xmm10,%xmm12 addl %esi,%edx movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movl %edx,%esi xorl %eax,%edi roll $5,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx pxor %xmm8,%xmm4 addl 
0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp rorl $7,%edx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 punpcklqdq %xmm11,%xmm12 movl %ecx,%edi xorl %ebp,%esi pxor %xmm5,%xmm4 roll $5,%ecx addl %esi,%ebx movdqa %xmm13,%xmm3 xorl %edx,%edi paddd %xmm11,%xmm13 xorl %ebp,%edx pxor %xmm12,%xmm4 addl %ecx,%ebx addl 4(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movdqa %xmm4,%xmm12 movl %ebx,%esi xorl %edx,%edi movdqa %xmm13,48(%rsp) roll $5,%ebx addl %edi,%eax xorl %ecx,%esi pslld $2,%xmm4 xorl %edx,%ecx addl %ebx,%eax psrld $30,%xmm12 addl 8(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 andl %ecx,%esi xorl %edx,%ecx por %xmm12,%xmm4 rorl $7,%ebx movl %eax,%edi xorl %ecx,%esi roll $5,%eax pshufd $238,%xmm11,%xmm13 addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx rorl $7,%eax movl %ebp,%esi xorl %ebx,%edi roll $5,%ebp addl %edi,%edx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx pxor %xmm9,%xmm5 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax rorl $7,%ebp punpcklqdq %xmm4,%xmm13 movl %edx,%edi xorl %eax,%esi pxor %xmm6,%xmm5 roll $5,%edx addl %esi,%ecx movdqa %xmm3,%xmm12 xorl %ebp,%edi paddd %xmm4,%xmm3 xorl %eax,%ebp pxor %xmm13,%xmm5 addl %edx,%ecx addl 20(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp rorl $7,%edx movups 48(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm5,%xmm13 movl %ecx,%esi xorl %ebp,%edi movdqa %xmm3,0(%rsp) roll $5,%ecx addl %edi,%ebx xorl %edx,%esi pslld $2,%xmm5 xorl %ebp,%edx addl %ecx,%ebx psrld $30,%xmm13 addl 24(%rsp),%eax andl %edx,%esi xorl %ebp,%edx por %xmm13,%xmm5 rorl $7,%ecx movl %ebx,%edi xorl %edx,%esi roll $5,%ebx pshufd $238,%xmm4,%xmm3 addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp cmpl $11,%r8d jb .Laesenclast3 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je .Laesenclast3 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 .Laesenclast3: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 andl %ecx,%edi xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%edi roll $5,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp pxor %xmm10,%xmm6 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax punpcklqdq %xmm5,%xmm3 movl %ebp,%edi xorl %ebx,%esi pxor %xmm7,%xmm6 roll $5,%ebp addl %esi,%edx movups 48(%r12),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,32(%r13,%r12,1) xorps %xmm14,%xmm2 movups -80(%r15),%xmm1 .byte 102,15,56,220,208 movdqa %xmm12,%xmm13 xorl %eax,%edi paddd %xmm5,%xmm12 xorl %ebx,%eax pxor %xmm3,%xmm6 addl %ebp,%edx addl 36(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax rorl $7,%ebp movdqa %xmm6,%xmm3 movl %edx,%esi xorl %eax,%edi movdqa %xmm12,16(%rsp) roll $5,%edx addl %edi,%ecx xorl %ebp,%esi pslld $2,%xmm6 xorl %eax,%ebp addl %edx,%ecx psrld $30,%xmm3 addl 40(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp por %xmm3,%xmm6 rorl $7,%edx movups -64(%r15),%xmm0 .byte 102,15,56,220,209 movl %ecx,%edi xorl %ebp,%esi roll $5,%ecx pshufd $238,%xmm5,%xmm12 addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%edi roll $5,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax pxor %xmm11,%xmm7 addl 48(%rsp),%ebp movups -48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi punpcklqdq %xmm6,%xmm12 movl %eax,%edi roll $5,%eax pxor %xmm8,%xmm7 addl %esi,%ebp xorl %ecx,%edi movdqa %xmm13,%xmm3 rorl $7,%ebx paddd %xmm6,%xmm13 addl 
%eax,%ebp pxor %xmm12,%xmm7 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp movdqa %xmm7,%xmm12 addl %edi,%edx xorl %ebx,%esi movdqa %xmm13,32(%rsp) rorl $7,%eax addl %ebp,%edx addl 56(%rsp),%ecx pslld $2,%xmm7 xorl %eax,%esi movl %edx,%edi psrld $30,%xmm12 roll $5,%edx addl %esi,%ecx movups -32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp por %xmm12,%xmm7 addl %edx,%ecx addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 0(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx paddd %xmm7,%xmm3 addl %esi,%eax xorl %edx,%edi movdqa %xmm3,48(%rsp) rorl $7,%ecx addl %ebx,%eax addl 4(%rsp),%ebp movups -16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx movups 0(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx cmpq %r14,%r10 je .Ldone_ssse3 movdqa 64(%r11),%xmm3 movdqa 0(%r11),%xmm13 movdqu 0(%r10),%xmm4 movdqu 16(%r10),%xmm5 movdqu 32(%r10),%xmm6 movdqu 48(%r10),%xmm7 .byte 102,15,56,0,227 addq $64,%r10 addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi .byte 102,15,56,0,235 roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx paddd %xmm13,%xmm4 addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi movdqa %xmm4,0(%rsp) roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx psubd %xmm13,%xmm4 addl %ebx,%eax addl 24(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi .byte 102,15,56,0,243 roll $5,%edx addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp paddd %xmm13,%xmm5 addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi movdqa %xmm5,16(%rsp) roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx psubd %xmm13,%xmm5 addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi .byte 102,15,56,0,251 roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax paddd %xmm13,%xmm6 addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi movdqa %xmm6,32(%rsp) roll $5,%edx addl %edi,%ecx cmpl $11,%r8d jb .Laesenclast4 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je .Laesenclast4 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 .Laesenclast4: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi rorl $7,%ebp psubd %xmm13,%xmm6 addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax movups %xmm2,48(%r13,%r12,1) leaq 64(%r12),%r12 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx addl 
12(%r9),%edx movl %eax,0(%r9) addl 16(%r9),%ebp movl %esi,4(%r9) movl %esi,%ebx movl %ecx,8(%r9) movl %ecx,%edi movl %edx,12(%r9) xorl %edx,%edi movl %ebp,16(%r9) andl %edi,%esi jmp .Loop_ssse3 .Ldone_ssse3: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 24(%rsp),%ebp movups 16(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%esi movl %eax,%edi roll $5,%eax addl %esi,%ebp xorl %ecx,%edi rorl $7,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi roll $5,%ebp addl %edi,%edx xorl %ebx,%esi rorl $7,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi roll $5,%edx addl %esi,%ecx movups 32(%r15),%xmm0 .byte 102,15,56,220,209 xorl %eax,%edi rorl $7,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi roll $5,%ecx addl %edi,%ebx xorl %ebp,%esi rorl $7,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi roll $5,%ebx addl %esi,%eax xorl %edx,%edi rorl $7,%ecx addl %ebx,%eax addl 44(%rsp),%ebp movups 48(%r15),%xmm1 .byte 102,15,56,220,208 xorl %ecx,%edi movl %eax,%esi roll $5,%eax addl %edi,%ebp xorl %ecx,%esi rorl $7,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi roll $5,%ebp addl %esi,%edx xorl %ebx,%edi rorl $7,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi roll $5,%edx addl %edi,%ecx cmpl $11,%r8d jb .Laesenclast5 movups 64(%r15),%xmm0 .byte 102,15,56,220,209 movups 80(%r15),%xmm1 .byte 102,15,56,220,208 je .Laesenclast5 movups 96(%r15),%xmm0 .byte 102,15,56,220,209 movups 112(%r15),%xmm1 .byte 102,15,56,220,208 .Laesenclast5: .byte 102,15,56,221,209 movups 16-112(%r15),%xmm0 xorl %eax,%esi rorl $7,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi roll $5,%ecx addl %esi,%ebx xorl %ebp,%edi rorl $7,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi roll $5,%ebx addl %edi,%eax rorl $7,%ecx addl %ebx,%eax movups %xmm2,48(%r13,%r12,1) movq 88(%rsp),%r8 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx movl %eax,0(%r9) addl 12(%r9),%edx movl %esi,4(%r9) addl 16(%r9),%ebp movl %ecx,8(%r9) movl %edx,12(%r9) movl %ebp,16(%r9) movups %xmm2,(%r8) leaq 104(%rsp),%rsi .cfi_def_cfa %rsi,56 movq 0(%rsi),%r15 .cfi_restore %r15 movq 8(%rsi),%r14 .cfi_restore %r14 movq 16(%rsi),%r13 .cfi_restore %r13 movq 24(%rsi),%r12 .cfi_restore %r12 movq 32(%rsi),%rbp .cfi_restore %rbp movq 40(%rsi),%rbx .cfi_restore %rbx leaq 48(%rsi),%rsp .cfi_def_cfa %rsp,8 .Lepilogue_ssse3: .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3 .type aesni_cbc_sha1_enc_avx,@function .align 32 aesni_cbc_sha1_enc_avx: .cfi_startproc movq 8(%rsp),%r10 pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 pushq %rbp .cfi_adjust_cfa_offset 8 .cfi_offset %rbp,-24 pushq %r12 .cfi_adjust_cfa_offset 8 .cfi_offset %r12,-32 pushq %r13 .cfi_adjust_cfa_offset 8 .cfi_offset %r13,-40 pushq %r14 .cfi_adjust_cfa_offset 8 .cfi_offset %r14,-48 pushq %r15 .cfi_adjust_cfa_offset 8 .cfi_offset %r15,-56 leaq -104(%rsp),%rsp .cfi_adjust_cfa_offset 104 vzeroall movq %rdi,%r12 movq %rsi,%r13 movq %rdx,%r14 leaq 112(%rcx),%r15 vmovdqu (%r8),%xmm12 movq %r8,88(%rsp) shlq $6,%r14 subq %r12,%r13 movl 240-112(%r15),%r8d addq %r10,%r14 leaq K_XX_XX(%rip),%r11 movl 0(%r9),%eax movl 4(%r9),%ebx movl 8(%r9),%ecx movl 12(%r9),%edx movl %ebx,%esi movl 16(%r9),%ebp movl %ecx,%edi xorl %edx,%edi andl 
%edi,%esi vmovdqa 64(%r11),%xmm6 vmovdqa 0(%r11),%xmm10 vmovdqu 0(%r10),%xmm0 vmovdqu 16(%r10),%xmm1 vmovdqu 32(%r10),%xmm2 vmovdqu 48(%r10),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 addq $64,%r10 vpshufb %xmm6,%xmm1,%xmm1 vpshufb %xmm6,%xmm2,%xmm2 vpshufb %xmm6,%xmm3,%xmm3 vpaddd %xmm10,%xmm0,%xmm4 vpaddd %xmm10,%xmm1,%xmm5 vpaddd %xmm10,%xmm2,%xmm6 vmovdqa %xmm4,0(%rsp) vmovdqa %xmm5,16(%rsp) vmovdqa %xmm6,32(%rsp) vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 jmp .Loop_avx .align 32 .Loop_avx: shrdl $2,%ebx,%ebx vmovdqu 0(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 xorl %edx,%esi vpalignr $8,%xmm0,%xmm1,%xmm4 movl %eax,%edi addl 0(%rsp),%ebp vpaddd %xmm3,%xmm10,%xmm9 xorl %ecx,%ebx shldl $5,%eax,%eax vpsrldq $4,%xmm3,%xmm8 addl %esi,%ebp andl %ebx,%edi vpxor %xmm0,%xmm4,%xmm4 xorl %ecx,%ebx addl %eax,%ebp vpxor %xmm2,%xmm8,%xmm8 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 4(%rsp),%edx vpxor %xmm8,%xmm4,%xmm4 xorl %ebx,%eax shldl $5,%ebp,%ebp vmovdqa %xmm9,48(%rsp) addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 andl %eax,%esi vpsrld $31,%xmm4,%xmm8 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpslldq $12,%xmm4,%xmm9 vpaddd %xmm4,%xmm4,%xmm4 movl %edx,%edi addl 8(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpor %xmm8,%xmm4,%xmm4 vpsrld $30,%xmm9,%xmm8 addl %esi,%ecx andl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm4,%xmm4 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 12(%rsp),%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 vpxor %xmm9,%xmm4,%xmm4 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %edi,%ebx andl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpalignr $8,%xmm1,%xmm2,%xmm5 movl %ebx,%edi addl 16(%rsp),%eax vpaddd %xmm4,%xmm10,%xmm9 xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrldq $4,%xmm4,%xmm8 addl %esi,%eax andl %ecx,%edi vpxor %xmm1,%xmm5,%xmm5 xorl %edx,%ecx addl %ebx,%eax vpxor %xmm3,%xmm8,%xmm8 shrdl $7,%ebx,%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %edx,%edi movl %eax,%esi addl 20(%rsp),%ebp vpxor %xmm8,%xmm5,%xmm5 xorl %ecx,%ebx shldl $5,%eax,%eax vmovdqa %xmm9,0(%rsp) addl %edi,%ebp andl %ebx,%esi vpsrld $31,%xmm5,%xmm8 xorl %ecx,%ebx addl %eax,%ebp shrdl $7,%eax,%eax xorl %ecx,%esi vpslldq $12,%xmm5,%xmm9 vpaddd %xmm5,%xmm5,%xmm5 movl %ebp,%edi addl 24(%rsp),%edx xorl %ebx,%eax shldl $5,%ebp,%ebp vpor %xmm8,%xmm5,%xmm5 vpsrld $30,%xmm9,%xmm8 addl %esi,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 andl %eax,%edi xorl %ebx,%eax addl %ebp,%edx vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm5,%xmm5 shrdl $7,%ebp,%ebp xorl %ebx,%edi movl %edx,%esi addl 28(%rsp),%ecx vpxor %xmm9,%xmm5,%xmm5 xorl %eax,%ebp shldl $5,%edx,%edx vmovdqa 16(%r11),%xmm10 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi vpalignr $8,%xmm2,%xmm3,%xmm6 movl %ecx,%edi addl 32(%rsp),%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 vpaddd %xmm5,%xmm10,%xmm9 xorl %ebp,%edx shldl $5,%ecx,%ecx vpsrldq $4,%xmm5,%xmm8 addl %esi,%ebx andl %edx,%edi vpxor %xmm2,%xmm6,%xmm6 xorl %ebp,%edx addl %ecx,%ebx vpxor %xmm4,%xmm8,%xmm8 shrdl $7,%ecx,%ecx xorl %ebp,%edi movl %ebx,%esi addl 36(%rsp),%eax vpxor %xmm8,%xmm6,%xmm6 xorl %edx,%ecx shldl $5,%ebx,%ebx vmovdqa %xmm9,16(%rsp) addl %edi,%eax andl %ecx,%esi vpsrld $31,%xmm6,%xmm8 xorl %edx,%ecx addl %ebx,%eax shrdl $7,%ebx,%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %edx,%esi vpslldq 
$12,%xmm6,%xmm9 vpaddd %xmm6,%xmm6,%xmm6 movl %eax,%edi addl 40(%rsp),%ebp xorl %ecx,%ebx shldl $5,%eax,%eax vpor %xmm8,%xmm6,%xmm6 vpsrld $30,%xmm9,%xmm8 addl %esi,%ebp andl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm6,%xmm6 shrdl $7,%eax,%eax xorl %ecx,%edi movl %ebp,%esi addl 44(%rsp),%edx vpxor %xmm9,%xmm6,%xmm6 xorl %ebx,%eax shldl $5,%ebp,%ebp addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 andl %eax,%esi xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%esi vpalignr $8,%xmm3,%xmm4,%xmm7 movl %edx,%edi addl 48(%rsp),%ecx vpaddd %xmm6,%xmm10,%xmm9 xorl %eax,%ebp shldl $5,%edx,%edx vpsrldq $4,%xmm6,%xmm8 addl %esi,%ecx andl %ebp,%edi vpxor %xmm3,%xmm7,%xmm7 xorl %eax,%ebp addl %edx,%ecx vpxor %xmm5,%xmm8,%xmm8 shrdl $7,%edx,%edx xorl %eax,%edi movl %ecx,%esi addl 52(%rsp),%ebx vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 vpxor %xmm8,%xmm7,%xmm7 xorl %ebp,%edx shldl $5,%ecx,%ecx vmovdqa %xmm9,32(%rsp) addl %edi,%ebx andl %edx,%esi vpsrld $31,%xmm7,%xmm8 xorl %ebp,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %ebp,%esi vpslldq $12,%xmm7,%xmm9 vpaddd %xmm7,%xmm7,%xmm7 movl %ebx,%edi addl 56(%rsp),%eax xorl %edx,%ecx shldl $5,%ebx,%ebx vpor %xmm8,%xmm7,%xmm7 vpsrld $30,%xmm9,%xmm8 addl %esi,%eax andl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax vpslld $2,%xmm9,%xmm9 vpxor %xmm8,%xmm7,%xmm7 shrdl $7,%ebx,%ebx cmpl $11,%r8d jb .Lvaesenclast6 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je .Lvaesenclast6 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 .Lvaesenclast6: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %edx,%edi movl %eax,%esi addl 60(%rsp),%ebp vpxor %xmm9,%xmm7,%xmm7 xorl %ecx,%ebx shldl $5,%eax,%eax addl %edi,%ebp andl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax xorl %ecx,%esi movl %ebp,%edi addl 0(%rsp),%edx vpxor %xmm1,%xmm0,%xmm0 xorl %ebx,%eax shldl $5,%ebp,%ebp vpaddd %xmm7,%xmm10,%xmm9 addl %esi,%edx vmovdqu 16(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,0(%r12,%r13,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 andl %eax,%edi vpxor %xmm8,%xmm0,%xmm0 xorl %ebx,%eax addl %ebp,%edx shrdl $7,%ebp,%ebp xorl %ebx,%edi vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) movl %edx,%esi addl 4(%rsp),%ecx xorl %eax,%ebp shldl $5,%edx,%edx vpslld $2,%xmm0,%xmm0 addl %edi,%ecx andl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi movl %ecx,%edi addl 8(%rsp),%ebx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 vpor %xmm8,%xmm0,%xmm0 xorl %ebp,%edx shldl $5,%ecx,%ecx addl %esi,%ebx andl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 12(%rsp),%eax xorl %ebp,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm2,%xmm1,%xmm1 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm0,%xmm10,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm1,%xmm1 addl 20(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm1,%xmm1 addl 24(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl 
$5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm1,%xmm1 addl 28(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx vpxor %xmm3,%xmm2,%xmm2 addl %esi,%eax xorl %edx,%edi vpaddd %xmm1,%xmm10,%xmm9 vmovdqa 32(%r11),%xmm10 shrdl $7,%ecx,%ecx addl %ebx,%eax vpxor %xmm8,%xmm2,%xmm2 addl 36(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpslld $2,%xmm2,%xmm2 addl 40(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vpor %xmm8,%xmm2,%xmm2 addl 44(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebx xorl %ebp,%edi vpaddd %xmm2,%xmm10,%xmm9 shrdl $7,%edx,%edx addl %ecx,%ebx vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpalignr $8,%xmm2,%xmm3,%xmm8 vpxor %xmm0,%xmm4,%xmm4 addl 0(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx vpxor %xmm5,%xmm4,%xmm4 addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi vpaddd %xmm3,%xmm10,%xmm9 shrdl $7,%ebp,%ebp addl %edx,%ecx vpxor %xmm8,%xmm4,%xmm4 addl 4(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx vpsrld $30,%xmm4,%xmm8 vmovdqa %xmm9,48(%rsp) addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpslld $2,%xmm4,%xmm4 addl 8(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax vpor %xmm8,%xmm4,%xmm4 addl 12(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp vpalignr $8,%xmm3,%xmm4,%xmm8 vpxor %xmm1,%xmm5,%xmm5 addl 16(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp vpxor %xmm6,%xmm5,%xmm5 addl %esi,%edx xorl %ebx,%edi vpaddd %xmm4,%xmm10,%xmm9 shrdl $7,%eax,%eax addl %ebp,%edx vpxor %xmm8,%xmm5,%xmm5 addl 20(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx vpsrld $30,%xmm5,%xmm8 vmovdqa %xmm9,0(%rsp) addl %edi,%ecx cmpl $11,%r8d jb .Lvaesenclast7 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je .Lvaesenclast7 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 .Lvaesenclast7: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 
16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx vpslld $2,%xmm5,%xmm5 addl 24(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vpor %xmm8,%xmm5,%xmm5 addl 28(%rsp),%eax shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax vpalignr $8,%xmm4,%xmm5,%xmm8 vpxor %xmm2,%xmm6,%xmm6 addl 32(%rsp),%ebp vmovdqu 32(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,16(%r13,%r12,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 andl %ecx,%esi xorl %edx,%ecx shrdl $7,%ebx,%ebx vpxor %xmm7,%xmm6,%xmm6 movl %eax,%edi xorl %ecx,%esi vpaddd %xmm5,%xmm10,%xmm9 shldl $5,%eax,%eax addl %esi,%ebp vpxor %xmm8,%xmm6,%xmm6 xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 36(%rsp),%edx vpsrld $30,%xmm6,%xmm8 vmovdqa %xmm9,16(%rsp) andl %ebx,%edi xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%esi vpslld $2,%xmm6,%xmm6 xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx addl 40(%rsp),%ecx andl %eax,%esi vpor %xmm8,%xmm6,%xmm6 xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%edi xorl %eax,%esi shldl $5,%edx,%edx addl %esi,%ecx xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 44(%rsp),%ebx andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 movl %ecx,%esi xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx vpalignr $8,%xmm5,%xmm6,%xmm8 vpxor %xmm3,%xmm7,%xmm7 addl 48(%rsp),%eax andl %edx,%esi xorl %ebp,%edx shrdl $7,%ecx,%ecx vpxor %xmm0,%xmm7,%xmm7 movl %ebx,%edi xorl %edx,%esi vpaddd %xmm6,%xmm10,%xmm9 vmovdqa 48(%r11),%xmm10 shldl $5,%ebx,%ebx addl %esi,%eax vpxor %xmm8,%xmm7,%xmm7 xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 52(%rsp),%ebp vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 vpsrld $30,%xmm7,%xmm8 vmovdqa %xmm9,32(%rsp) andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi vpslld $2,%xmm7,%xmm7 xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp addl 56(%rsp),%edx andl %ebx,%esi vpor %xmm8,%xmm7,%xmm7 xorl %ecx,%ebx shrdl $7,%eax,%eax movl %ebp,%edi xorl %ebx,%esi shldl $5,%ebp,%ebp addl %esi,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 60(%rsp),%ecx andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx vpalignr $8,%xmm6,%xmm7,%xmm8 vpxor %xmm4,%xmm0,%xmm0 addl 0(%rsp),%ebx andl %ebp,%esi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 vpxor %xmm1,%xmm0,%xmm0 movl %ecx,%edi xorl %ebp,%esi vpaddd %xmm7,%xmm10,%xmm9 shldl $5,%ecx,%ecx addl %esi,%ebx vpxor %xmm8,%xmm0,%xmm0 xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 4(%rsp),%eax vpsrld $30,%xmm0,%xmm8 vmovdqa %xmm9,48(%rsp) andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi vpslld $2,%xmm0,%xmm0 xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 8(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 andl %ecx,%esi vpor %xmm8,%xmm0,%xmm0 xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%edi xorl %ecx,%esi shldl $5,%eax,%eax addl %esi,%ebp xorl %ebx,%edi xorl %ecx,%ebx addl %eax,%ebp addl 12(%rsp),%edx andl %ebx,%edi xorl %ecx,%ebx 
shrdl $7,%eax,%eax movl %ebp,%esi xorl %ebx,%edi shldl $5,%ebp,%ebp addl %edi,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%esi xorl %ebx,%eax addl %ebp,%edx vpalignr $8,%xmm7,%xmm0,%xmm8 vpxor %xmm5,%xmm1,%xmm1 addl 16(%rsp),%ecx andl %eax,%esi xorl %ebx,%eax shrdl $7,%ebp,%ebp vpxor %xmm2,%xmm1,%xmm1 movl %edx,%edi xorl %eax,%esi vpaddd %xmm0,%xmm10,%xmm9 shldl $5,%edx,%edx addl %esi,%ecx vpxor %xmm8,%xmm1,%xmm1 xorl %ebp,%edi xorl %eax,%ebp addl %edx,%ecx addl 20(%rsp),%ebx vpsrld $30,%xmm1,%xmm8 vmovdqa %xmm9,0(%rsp) andl %ebp,%edi xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 movl %ecx,%esi vpslld $2,%xmm1,%xmm1 xorl %ebp,%edi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %edx,%esi xorl %ebp,%edx addl %ecx,%ebx addl 24(%rsp),%eax andl %edx,%esi vpor %xmm8,%xmm1,%xmm1 xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%edi xorl %edx,%esi shldl $5,%ebx,%ebx addl %esi,%eax xorl %ecx,%edi xorl %edx,%ecx addl %ebx,%eax addl 28(%rsp),%ebp cmpl $11,%r8d jb .Lvaesenclast8 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je .Lvaesenclast8 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 .Lvaesenclast8: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 andl %ecx,%edi xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi xorl %ecx,%edi shldl $5,%eax,%eax addl %edi,%ebp xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%ebp vpalignr $8,%xmm0,%xmm1,%xmm8 vpxor %xmm6,%xmm2,%xmm2 addl 32(%rsp),%edx andl %ebx,%esi xorl %ecx,%ebx shrdl $7,%eax,%eax vpxor %xmm3,%xmm2,%xmm2 movl %ebp,%edi xorl %ebx,%esi vpaddd %xmm1,%xmm10,%xmm9 shldl $5,%ebp,%ebp addl %esi,%edx vmovdqu 48(%r12),%xmm13 vpxor %xmm15,%xmm13,%xmm13 vmovups %xmm12,32(%r13,%r12,1) vpxor %xmm13,%xmm12,%xmm12 vaesenc %xmm14,%xmm12,%xmm12 vmovups -80(%r15),%xmm15 vpxor %xmm8,%xmm2,%xmm2 xorl %eax,%edi xorl %ebx,%eax addl %ebp,%edx addl 36(%rsp),%ecx vpsrld $30,%xmm2,%xmm8 vmovdqa %xmm9,16(%rsp) andl %eax,%edi xorl %ebx,%eax shrdl $7,%ebp,%ebp movl %edx,%esi vpslld $2,%xmm2,%xmm2 xorl %eax,%edi shldl $5,%edx,%edx addl %edi,%ecx xorl %ebp,%esi xorl %eax,%ebp addl %edx,%ecx addl 40(%rsp),%ebx andl %ebp,%esi vpor %xmm8,%xmm2,%xmm2 xorl %eax,%ebp shrdl $7,%edx,%edx vaesenc %xmm15,%xmm12,%xmm12 vmovups -64(%r15),%xmm14 movl %ecx,%edi xorl %ebp,%esi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edx,%edi xorl %ebp,%edx addl %ecx,%ebx addl 44(%rsp),%eax andl %edx,%edi xorl %ebp,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%edi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi addl %ebx,%eax vpalignr $8,%xmm1,%xmm2,%xmm8 vpxor %xmm7,%xmm3,%xmm3 addl 48(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -48(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax vpxor %xmm4,%xmm3,%xmm3 addl %esi,%ebp xorl %ecx,%edi vpaddd %xmm2,%xmm10,%xmm9 shrdl $7,%ebx,%ebx addl %eax,%ebp vpxor %xmm8,%xmm3,%xmm3 addl 52(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp vpsrld $30,%xmm3,%xmm8 vmovdqa %xmm9,32(%rsp) addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx vpslld $2,%xmm3,%xmm3 addl 56(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups -32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vpor %xmm8,%xmm3,%xmm3 addl 60(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 0(%rsp),%eax vpaddd %xmm3,%xmm10,%xmm9 
xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax vmovdqa %xmm9,48(%rsp) xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 4(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups -16(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 8(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 12(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 0(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx cmpq %r14,%r10 je .Ldone_avx vmovdqa 64(%r11),%xmm9 vmovdqa 0(%r11),%xmm10 vmovdqu 0(%r10),%xmm0 vmovdqu 16(%r10),%xmm1 vmovdqu 32(%r10),%xmm2 vmovdqu 48(%r10),%xmm3 vpshufb %xmm9,%xmm0,%xmm0 addq $64,%r10 addl 16(%rsp),%ebx xorl %ebp,%esi vpshufb %xmm9,%xmm1,%xmm1 movl %ecx,%edi shldl $5,%ecx,%ecx vpaddd %xmm10,%xmm0,%xmm8 addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx vmovdqa %xmm8,0(%rsp) addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi vpshufb %xmm9,%xmm2,%xmm2 movl %edx,%edi shldl $5,%edx,%edx vpaddd %xmm10,%xmm1,%xmm8 addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx vmovdqa %xmm8,16(%rsp) addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi vpshufb %xmm9,%xmm3,%xmm3 movl %ebp,%edi shldl $5,%ebp,%ebp vpaddd %xmm10,%xmm2,%xmm8 addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx vmovdqa %xmm8,32(%rsp) addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx cmpl $11,%r8d jb .Lvaesenclast9 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je .Lvaesenclast9 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 .Lvaesenclast9: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vmovups %xmm12,48(%r13,%r12,1) leaq 64(%r12),%r12 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx addl 12(%r9),%edx movl %eax,0(%r9) addl 16(%r9),%ebp movl %esi,4(%r9) movl %esi,%ebx movl %ecx,8(%r9) movl %ecx,%edi movl %edx,12(%r9) xorl %edx,%edi movl %ebp,16(%r9) andl %edi,%esi jmp .Loop_avx .Ldone_avx: addl 16(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx 
xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 20(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 16(%r15),%xmm15 xorl %ecx,%esi movl %eax,%edi shldl $5,%eax,%eax addl %esi,%ebp xorl %ecx,%edi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 28(%rsp),%edx xorl %ebx,%edi movl %ebp,%esi shldl $5,%ebp,%ebp addl %edi,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %ebp,%edx addl 32(%rsp),%ecx xorl %eax,%esi movl %edx,%edi shldl $5,%edx,%edx addl %esi,%ecx vaesenc %xmm15,%xmm12,%xmm12 vmovups 32(%r15),%xmm14 xorl %eax,%edi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 36(%rsp),%ebx xorl %ebp,%edi movl %ecx,%esi shldl $5,%ecx,%ecx addl %edi,%ebx xorl %ebp,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%rsp),%eax xorl %edx,%esi movl %ebx,%edi shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%edi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%rsp),%ebp vaesenc %xmm14,%xmm12,%xmm12 vmovups 48(%r15),%xmm15 xorl %ecx,%edi movl %eax,%esi shldl $5,%eax,%eax addl %edi,%ebp xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%ebp addl 48(%rsp),%edx xorl %ebx,%esi movl %ebp,%edi shldl $5,%ebp,%ebp addl %esi,%edx xorl %ebx,%edi shrdl $7,%eax,%eax addl %ebp,%edx addl 52(%rsp),%ecx xorl %eax,%edi movl %edx,%esi shldl $5,%edx,%edx addl %edi,%ecx cmpl $11,%r8d jb .Lvaesenclast10 vaesenc %xmm15,%xmm12,%xmm12 vmovups 64(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 80(%r15),%xmm15 je .Lvaesenclast10 vaesenc %xmm15,%xmm12,%xmm12 vmovups 96(%r15),%xmm14 vaesenc %xmm14,%xmm12,%xmm12 vmovups 112(%r15),%xmm15 .Lvaesenclast10: vaesenclast %xmm15,%xmm12,%xmm12 vmovups -112(%r15),%xmm15 vmovups 16-112(%r15),%xmm14 xorl %eax,%esi shrdl $7,%ebp,%ebp addl %edx,%ecx addl 56(%rsp),%ebx xorl %ebp,%esi movl %ecx,%edi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %ebp,%edi shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%rsp),%eax xorl %edx,%edi movl %ebx,%esi shldl $5,%ebx,%ebx addl %edi,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vmovups %xmm12,48(%r13,%r12,1) movq 88(%rsp),%r8 addl 0(%r9),%eax addl 4(%r9),%esi addl 8(%r9),%ecx movl %eax,0(%r9) addl 12(%r9),%edx movl %esi,4(%r9) addl 16(%r9),%ebp movl %ecx,8(%r9) movl %edx,12(%r9) movl %ebp,16(%r9) vmovups %xmm12,(%r8) vzeroall leaq 104(%rsp),%rsi .cfi_def_cfa %rsi,56 movq 0(%rsi),%r15 .cfi_restore %r15 movq 8(%rsi),%r14 .cfi_restore %r14 movq 16(%rsi),%r13 .cfi_restore %r13 movq 24(%rsi),%r12 .cfi_restore %r12 movq 32(%rsi),%rbp .cfi_restore %rbp movq 40(%rsi),%rbx .cfi_restore %rbx leaq 48(%rsi),%rsp .cfi_def_cfa %rsp,8 .Lepilogue_avx: .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha1_enc_avx,.-aesni_cbc_sha1_enc_avx .section .rodata .align 64 K_XX_XX: .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f .byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .text .align 64 .type aesni_cbc_sha1_enc_shaext,@function .align 32 aesni_cbc_sha1_enc_shaext: .cfi_startproc movq 8(%rsp),%r10 movdqu (%r9),%xmm8 movd 16(%r9),%xmm9 movdqa K_XX_XX+80(%rip),%xmm7 movl 240(%rcx),%r11d subq %rdi,%rsi movups (%rcx),%xmm15 movups (%r8),%xmm2 movups 16(%rcx),%xmm0 leaq 
112(%rcx),%rcx pshufd $27,%xmm8,%xmm8 pshufd $27,%xmm9,%xmm9 jmp .Loop_shaext .align 16 .Loop_shaext: movups 0(%rdi),%xmm14 xorps %xmm15,%xmm14 xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 movdqu (%r10),%xmm3 movdqa %xmm9,%xmm12 .byte 102,15,56,0,223 movdqu 16(%r10),%xmm4 movdqa %xmm8,%xmm11 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 102,15,56,0,231 paddd %xmm3,%xmm9 movdqu 32(%r10),%xmm5 leaq 64(%r10),%r10 pxor %xmm12,%xmm3 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 pxor %xmm12,%xmm3 movdqa %xmm8,%xmm10 .byte 102,15,56,0,239 .byte 69,15,58,204,193,0 .byte 68,15,56,200,212 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 .byte 15,56,201,220 movdqu -16(%r10),%xmm6 movdqa %xmm8,%xmm9 .byte 102,15,56,0,247 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 69,15,58,204,194,0 .byte 68,15,56,200,205 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,0 .byte 68,15,56,200,214 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,0 .byte 68,15,56,200,203 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 cmpl $11,%r11d jb .Laesenclast11 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je .Laesenclast11 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 .Laesenclast11: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,0 .byte 68,15,56,200,212 movups 16(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,0(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,205 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,1 .byte 68,15,56,200,214 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,203 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,1 .byte 68,15,56,200,212 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 cmpl $11,%r11d jb .Laesenclast12 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je .Laesenclast12 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 .Laesenclast12: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,1 .byte 68,15,56,200,205 movups 32(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,16(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,214 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups -32(%rcx),%xmm0 .byte 
102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,2 .byte 68,15,56,200,203 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,212 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,236 pxor %xmm4,%xmm6 .byte 15,56,201,220 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,2 .byte 68,15,56,200,205 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,245 pxor %xmm5,%xmm3 .byte 15,56,201,229 cmpl $11,%r11d jb .Laesenclast13 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je .Laesenclast13 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 .Laesenclast13: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,2 .byte 68,15,56,200,214 movups 48(%rdi),%xmm14 xorps %xmm15,%xmm14 movups %xmm2,32(%rsi,%rdi,1) xorps %xmm14,%xmm2 movups -80(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,222 pxor %xmm6,%xmm4 .byte 15,56,201,238 movups -64(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,203 movups -48(%rcx),%xmm1 .byte 102,15,56,220,208 .byte 15,56,202,227 pxor %xmm3,%xmm5 .byte 15,56,201,243 movups -32(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,3 .byte 68,15,56,200,212 .byte 15,56,202,236 pxor %xmm4,%xmm6 movups -16(%rcx),%xmm1 .byte 102,15,56,220,208 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,205 .byte 15,56,202,245 movups 0(%rcx),%xmm0 .byte 102,15,56,220,209 movdqa %xmm12,%xmm5 movdqa %xmm8,%xmm10 .byte 69,15,58,204,193,3 .byte 68,15,56,200,214 movups 16(%rcx),%xmm1 .byte 102,15,56,220,208 movdqa %xmm8,%xmm9 .byte 69,15,58,204,194,3 .byte 68,15,56,200,205 movups 32(%rcx),%xmm0 .byte 102,15,56,220,209 movups 48(%rcx),%xmm1 .byte 102,15,56,220,208 cmpl $11,%r11d jb .Laesenclast14 movups 64(%rcx),%xmm0 .byte 102,15,56,220,209 movups 80(%rcx),%xmm1 .byte 102,15,56,220,208 je .Laesenclast14 movups 96(%rcx),%xmm0 .byte 102,15,56,220,209 movups 112(%rcx),%xmm1 .byte 102,15,56,220,208 .Laesenclast14: .byte 102,15,56,221,209 movups 16-112(%rcx),%xmm0 decq %rdx paddd %xmm11,%xmm8 movups %xmm2,48(%rsi,%rdi,1) leaq 64(%rdi),%rdi jnz .Loop_shaext pshufd $27,%xmm8,%xmm8 pshufd $27,%xmm9,%xmm9 movups %xmm2,(%r8) movdqu %xmm8,(%r9) movd %xmm9,16(%r9) .byte 0xf3,0xc3 .cfi_endproc .size aesni_cbc_sha1_enc_shaext,.-aesni_cbc_sha1_enc_shaext #endif
marvin-hansen/iggy-streaming-system
3,671
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86/crypto/test/trampoline-x86.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__) .text .globl _abi_test_trampoline .private_extern _abi_test_trampoline .align 4 _abi_test_trampoline: L_abi_test_trampoline_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi movl 24(%esp),%ecx movl (%ecx),%esi movl 4(%ecx),%edi movl 8(%ecx),%ebx movl 12(%ecx),%ebp subl $44,%esp movl 72(%esp),%eax xorl %ecx,%ecx L000loop: cmpl 76(%esp),%ecx jae L001loop_done movl (%eax,%ecx,4),%edx movl %edx,(%esp,%ecx,4) addl $1,%ecx jmp L000loop L001loop_done: call *64(%esp) addl $44,%esp movl 24(%esp),%ecx movl %esi,(%ecx) movl %edi,4(%ecx) movl %ebx,8(%ecx) movl %ebp,12(%ecx) popl %edi popl %esi popl %ebx popl %ebp ret .globl _abi_test_get_and_clear_direction_flag .private_extern _abi_test_get_and_clear_direction_flag .align 4 _abi_test_get_and_clear_direction_flag: L_abi_test_get_and_clear_direction_flag_begin: pushfl popl %eax andl $1024,%eax shrl $10,%eax cld ret .globl _abi_test_set_direction_flag .private_extern _abi_test_set_direction_flag .align 4 _abi_test_set_direction_flag: L_abi_test_set_direction_flag_begin: std ret .globl _abi_test_clobber_eax .private_extern _abi_test_clobber_eax .align 4 _abi_test_clobber_eax: L_abi_test_clobber_eax_begin: xorl %eax,%eax ret .globl _abi_test_clobber_ebx .private_extern _abi_test_clobber_ebx .align 4 _abi_test_clobber_ebx: L_abi_test_clobber_ebx_begin: xorl %ebx,%ebx ret .globl _abi_test_clobber_ecx .private_extern _abi_test_clobber_ecx .align 4 _abi_test_clobber_ecx: L_abi_test_clobber_ecx_begin: xorl %ecx,%ecx ret .globl _abi_test_clobber_edx .private_extern _abi_test_clobber_edx .align 4 _abi_test_clobber_edx: L_abi_test_clobber_edx_begin: xorl %edx,%edx ret .globl _abi_test_clobber_edi .private_extern _abi_test_clobber_edi .align 4 _abi_test_clobber_edi: L_abi_test_clobber_edi_begin: xorl %edi,%edi ret .globl _abi_test_clobber_esi .private_extern _abi_test_clobber_esi .align 4 _abi_test_clobber_esi: L_abi_test_clobber_esi_begin: xorl %esi,%esi ret .globl _abi_test_clobber_ebp .private_extern _abi_test_clobber_ebp .align 4 _abi_test_clobber_ebp: L_abi_test_clobber_ebp_begin: xorl %ebp,%ebp ret .globl _abi_test_clobber_xmm0 .private_extern _abi_test_clobber_xmm0 .align 4 _abi_test_clobber_xmm0: L_abi_test_clobber_xmm0_begin: pxor %xmm0,%xmm0 ret .globl _abi_test_clobber_xmm1 .private_extern _abi_test_clobber_xmm1 .align 4 _abi_test_clobber_xmm1: L_abi_test_clobber_xmm1_begin: pxor %xmm1,%xmm1 ret .globl _abi_test_clobber_xmm2 .private_extern _abi_test_clobber_xmm2 .align 4 _abi_test_clobber_xmm2: L_abi_test_clobber_xmm2_begin: pxor %xmm2,%xmm2 ret .globl _abi_test_clobber_xmm3 .private_extern _abi_test_clobber_xmm3 .align 4 _abi_test_clobber_xmm3: L_abi_test_clobber_xmm3_begin: pxor %xmm3,%xmm3 ret .globl _abi_test_clobber_xmm4 .private_extern _abi_test_clobber_xmm4 .align 4 _abi_test_clobber_xmm4: L_abi_test_clobber_xmm4_begin: pxor %xmm4,%xmm4 ret .globl _abi_test_clobber_xmm5 .private_extern _abi_test_clobber_xmm5 .align 4 _abi_test_clobber_xmm5: L_abi_test_clobber_xmm5_begin: pxor %xmm5,%xmm5 ret .globl _abi_test_clobber_xmm6 .private_extern _abi_test_clobber_xmm6 .align 4 _abi_test_clobber_xmm6: L_abi_test_clobber_xmm6_begin: pxor %xmm6,%xmm6 ret .globl _abi_test_clobber_xmm7 .private_extern _abi_test_clobber_xmm7 .align 4 _abi_test_clobber_xmm7: L_abi_test_clobber_xmm7_begin: pxor %xmm7,%xmm7 ret #endif // !defined(OPENSSL_NO_ASM) 
&& defined(OPENSSL_X86) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
98,564
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86/crypto/fipsmodule/sha256-586.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__) .text .globl _sha256_block_data_order_nohw .private_extern _sha256_block_data_order_nohw .align 4 _sha256_block_data_order_nohw: L_sha256_block_data_order_nohw_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi movl 20(%esp),%esi movl 24(%esp),%edi movl 28(%esp),%eax movl %esp,%ebx call L000pic_point L000pic_point: popl %ebp leal LK256-L000pic_point(%ebp),%ebp subl $16,%esp andl $-64,%esp shll $6,%eax addl %edi,%eax movl %esi,(%esp) movl %edi,4(%esp) movl %eax,8(%esp) movl %ebx,12(%esp) L001no_xmm: subl %edi,%eax cmpl $256,%eax jae L002unrolled jmp L003loop .align 4,0x90 L003loop: movl (%edi),%eax movl 4(%edi),%ebx movl 8(%edi),%ecx bswap %eax movl 12(%edi),%edx bswap %ebx pushl %eax bswap %ecx pushl %ebx bswap %edx pushl %ecx pushl %edx movl 16(%edi),%eax movl 20(%edi),%ebx movl 24(%edi),%ecx bswap %eax movl 28(%edi),%edx bswap %ebx pushl %eax bswap %ecx pushl %ebx bswap %edx pushl %ecx pushl %edx movl 32(%edi),%eax movl 36(%edi),%ebx movl 40(%edi),%ecx bswap %eax movl 44(%edi),%edx bswap %ebx pushl %eax bswap %ecx pushl %ebx bswap %edx pushl %ecx pushl %edx movl 48(%edi),%eax movl 52(%edi),%ebx movl 56(%edi),%ecx bswap %eax movl 60(%edi),%edx bswap %ebx pushl %eax bswap %ecx pushl %ebx bswap %edx pushl %ecx pushl %edx addl $64,%edi leal -36(%esp),%esp movl %edi,104(%esp) movl (%esi),%eax movl 4(%esi),%ebx movl 8(%esi),%ecx movl 12(%esi),%edi movl %ebx,8(%esp) xorl %ecx,%ebx movl %ecx,12(%esp) movl %edi,16(%esp) movl %ebx,(%esp) movl 16(%esi),%edx movl 20(%esi),%ebx movl 24(%esi),%ecx movl 28(%esi),%edi movl %ebx,24(%esp) movl %ecx,28(%esp) movl %edi,32(%esp) .align 4,0x90 L00400_15: movl %edx,%ecx movl 24(%esp),%esi rorl $14,%ecx movl 28(%esp),%edi xorl %edx,%ecx xorl %edi,%esi movl 96(%esp),%ebx rorl $5,%ecx andl %edx,%esi movl %edx,20(%esp) xorl %ecx,%edx addl 32(%esp),%ebx xorl %edi,%esi rorl $6,%edx movl %eax,%ecx addl %esi,%ebx rorl $9,%ecx addl %edx,%ebx movl 8(%esp),%edi xorl %eax,%ecx movl %eax,4(%esp) leal -4(%esp),%esp rorl $11,%ecx movl (%ebp),%esi xorl %eax,%ecx movl 20(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %esi,%ebx movl %eax,(%esp) addl %ebx,%edx andl 4(%esp),%eax addl %ecx,%ebx xorl %edi,%eax addl $4,%ebp addl %ebx,%eax cmpl $3248222580,%esi jne L00400_15 movl 156(%esp),%ecx jmp L00516_63 .align 4,0x90 L00516_63: movl %ecx,%ebx movl 104(%esp),%esi rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 160(%esp),%ebx shrl $10,%edi addl 124(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 24(%esp),%esi rorl $14,%ecx addl %edi,%ebx movl 28(%esp),%edi xorl %edx,%ecx xorl %edi,%esi movl %ebx,96(%esp) rorl $5,%ecx andl %edx,%esi movl %edx,20(%esp) xorl %ecx,%edx addl 32(%esp),%ebx xorl %edi,%esi rorl $6,%edx movl %eax,%ecx addl %esi,%ebx rorl $9,%ecx addl %edx,%ebx movl 8(%esp),%edi xorl %eax,%ecx movl %eax,4(%esp) leal -4(%esp),%esp rorl $11,%ecx movl (%ebp),%esi xorl %eax,%ecx movl 20(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %esi,%ebx movl %eax,(%esp) addl %ebx,%edx andl 4(%esp),%eax addl %ecx,%ebx xorl %edi,%eax movl 156(%esp),%ecx addl $4,%ebp addl %ebx,%eax cmpl $3329325298,%esi jne L00516_63 movl 356(%esp),%esi movl 8(%esp),%ebx movl 16(%esp),%ecx addl (%esi),%eax addl 4(%esi),%ebx addl 8(%esi),%edi addl 12(%esi),%ecx movl %eax,(%esi) movl %ebx,4(%esi) movl %edi,8(%esi) movl 
%ecx,12(%esi) movl 24(%esp),%eax movl 28(%esp),%ebx movl 32(%esp),%ecx movl 360(%esp),%edi addl 16(%esi),%edx addl 20(%esi),%eax addl 24(%esi),%ebx addl 28(%esi),%ecx movl %edx,16(%esi) movl %eax,20(%esi) movl %ebx,24(%esi) movl %ecx,28(%esi) leal 356(%esp),%esp subl $256,%ebp cmpl 8(%esp),%edi jb L003loop movl 12(%esp),%esp popl %edi popl %esi popl %ebx popl %ebp ret .align 6,0x90 LK256: .long 1116352408,1899447441,3049323471,3921009573,961987163,1508970993,2453635748,2870763221,3624381080,310598401,607225278,1426881987,1925078388,2162078206,2614888103,3248222580,3835390401,4022224774,264347078,604807628,770255983,1249150122,1555081692,1996064986,2554220882,2821834349,2952996808,3210313671,3336571891,3584528711,113926993,338241895,666307205,773529912,1294757372,1396182291,1695183700,1986661051,2177026350,2456956037,2730485921,2820302411,3259730800,3345764771,3516065817,3600352804,4094571909,275423344,430227734,506948616,659060556,883997877,958139571,1322822218,1537002063,1747873779,1955562222,2024104815,2227730452,2361852424,2428436474,2756734187,3204031479,3329325298 .long 66051,67438087,134810123,202182159 .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97 .byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32 .byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97 .byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103 .byte 62,0 .align 4,0x90 L002unrolled: leal -96(%esp),%esp movl (%esi),%eax movl 4(%esi),%ebp movl 8(%esi),%ecx movl 12(%esi),%ebx movl %ebp,4(%esp) xorl %ecx,%ebp movl %ecx,8(%esp) movl %ebx,12(%esp) movl 16(%esi),%edx movl 20(%esi),%ebx movl 24(%esi),%ecx movl 28(%esi),%esi movl %ebx,20(%esp) movl %ecx,24(%esp) movl %esi,28(%esp) jmp L006grand_loop .align 4,0x90 L006grand_loop: movl (%edi),%ebx movl 4(%edi),%ecx bswap %ebx movl 8(%edi),%esi bswap %ecx movl %ebx,32(%esp) bswap %esi movl %ecx,36(%esp) movl %esi,40(%esp) movl 12(%edi),%ebx movl 16(%edi),%ecx bswap %ebx movl 20(%edi),%esi bswap %ecx movl %ebx,44(%esp) bswap %esi movl %ecx,48(%esp) movl %esi,52(%esp) movl 24(%edi),%ebx movl 28(%edi),%ecx bswap %ebx movl 32(%edi),%esi bswap %ecx movl %ebx,56(%esp) bswap %esi movl %ecx,60(%esp) movl %esi,64(%esp) movl 36(%edi),%ebx movl 40(%edi),%ecx bswap %ebx movl 44(%edi),%esi bswap %ecx movl %ebx,68(%esp) bswap %esi movl %ecx,72(%esp) movl %esi,76(%esp) movl 48(%edi),%ebx movl 52(%edi),%ecx bswap %ebx movl 56(%edi),%esi bswap %ecx movl %ebx,80(%esp) bswap %esi movl %ecx,84(%esp) movl %esi,88(%esp) movl 60(%edi),%ebx addl $64,%edi bswap %ebx movl %edi,100(%esp) movl %ebx,92(%esp) movl %edx,%ecx movl 20(%esp),%esi rorl $14,%edx movl 24(%esp),%edi xorl %ecx,%edx movl 32(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1116352408(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl 16(%esp),%ecx rorl $14,%edx movl 20(%esp),%edi xorl %esi,%edx movl 36(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1899447441(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 
8(%esp),%edx addl %esi,%eax movl %edx,%ecx movl 12(%esp),%esi rorl $14,%edx movl 16(%esp),%edi xorl %ecx,%edx movl 40(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3049323471(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl 8(%esp),%ecx rorl $14,%edx movl 12(%esp),%edi xorl %esi,%edx movl 44(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3921009573(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl %edx,%ecx movl 4(%esp),%esi rorl $14,%edx movl 8(%esp),%edi xorl %ecx,%edx movl 48(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 961987163(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl (%esp),%ecx rorl $14,%edx movl 4(%esp),%edi xorl %esi,%edx movl 52(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1508970993(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl %edx,%ecx movl 28(%esp),%esi rorl $14,%edx movl (%esp),%edi xorl %ecx,%edx movl 56(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2453635748(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl 24(%esp),%ecx rorl $14,%edx movl 28(%esp),%edi xorl %esi,%edx movl 60(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2870763221(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl %edx,%ecx movl 20(%esp),%esi rorl $14,%edx movl 24(%esp),%edi xorl %ecx,%edx movl 64(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3624381080(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 
%edx,%esi movl 16(%esp),%ecx rorl $14,%edx movl 20(%esp),%edi xorl %esi,%edx movl 68(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 310598401(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl %edx,%ecx movl 12(%esp),%esi rorl $14,%edx movl 16(%esp),%edi xorl %ecx,%edx movl 72(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 607225278(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl 8(%esp),%ecx rorl $14,%edx movl 12(%esp),%edi xorl %esi,%edx movl 76(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1426881987(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl %edx,%ecx movl 4(%esp),%esi rorl $14,%edx movl 8(%esp),%edi xorl %ecx,%edx movl 80(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1925078388(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl (%esp),%ecx rorl $14,%edx movl 4(%esp),%edi xorl %esi,%edx movl 84(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2162078206(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl %edx,%ecx movl 28(%esp),%esi rorl $14,%edx movl (%esp),%edi xorl %ecx,%edx movl 88(%esp),%ebx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2614888103(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl %edx,%esi movl 24(%esp),%ecx rorl $14,%edx movl 28(%esp),%edi xorl %esi,%edx movl 92(%esp),%ebx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3248222580(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 36(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 88(%esp),%esi movl 
%ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 32(%esp),%ebx shrl $10,%edi addl 68(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,32(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3835390401(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 40(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 92(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 36(%esp),%ebx shrl $10,%edi addl 72(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,36(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 4022224774(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 44(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 32(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 40(%esp),%ebx shrl $10,%edi addl 76(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,40(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 264347078(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 48(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 36(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 44(%esp),%ebx shrl $10,%edi addl 80(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,44(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 604807628(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 52(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 40(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 48(%esp),%ebx shrl $10,%edi addl 84(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,48(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl 
%eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 770255983(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 56(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 44(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 52(%esp),%ebx shrl $10,%edi addl 88(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,52(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1249150122(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 60(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl 48(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 56(%esp),%ebx shrl $10,%edi addl 92(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx movl %ebx,56(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1555081692(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 64(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 52(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 60(%esp),%ebx shrl $10,%edi addl 32(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx movl %ebx,60(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1996064986(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 68(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 56(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 64(%esp),%ebx shrl $10,%edi addl 36(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,64(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2554220882(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 72(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 60(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 68(%esp),%ebx shrl $10,%edi addl 40(%esp),%ebx movl %edx,%esi xorl %ecx,%edi 
movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,68(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2821834349(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 76(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 64(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 72(%esp),%ebx shrl $10,%edi addl 44(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,72(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2952996808(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 80(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 68(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 76(%esp),%ebx shrl $10,%edi addl 48(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,76(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3210313671(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 84(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 72(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 80(%esp),%ebx shrl $10,%edi addl 52(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,80(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3336571891(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 88(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 76(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 84(%esp),%ebx shrl $10,%edi addl 56(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,84(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3584528711(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 92(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 
24(%esp),%edx addl %esi,%eax movl 80(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 88(%esp),%ebx shrl $10,%edi addl 60(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx movl %ebx,88(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 113926993(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 32(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 84(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 92(%esp),%ebx shrl $10,%edi addl 64(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx movl %ebx,92(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 338241895(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 36(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 88(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 32(%esp),%ebx shrl $10,%edi addl 68(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,32(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 666307205(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 40(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 92(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 36(%esp),%ebx shrl $10,%edi addl 72(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,36(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 773529912(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 44(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 32(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 40(%esp),%ebx shrl $10,%edi addl 76(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,40(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl 
%eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1294757372(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 48(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 36(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 44(%esp),%ebx shrl $10,%edi addl 80(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,44(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1396182291(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 52(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 40(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 48(%esp),%ebx shrl $10,%edi addl 84(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,48(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1695183700(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 56(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 44(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 52(%esp),%ebx shrl $10,%edi addl 88(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,52(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1986661051(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 60(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl 48(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 56(%esp),%ebx shrl $10,%edi addl 92(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx movl %ebx,56(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2177026350(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 64(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 52(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 60(%esp),%ebx shrl $10,%edi addl 
32(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx movl %ebx,60(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2456956037(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 68(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 56(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 64(%esp),%ebx shrl $10,%edi addl 36(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,64(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2730485921(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 72(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 60(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 68(%esp),%ebx shrl $10,%edi addl 40(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,68(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2820302411(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 76(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 64(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 72(%esp),%ebx shrl $10,%edi addl 44(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,72(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3259730800(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 80(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 68(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 76(%esp),%ebx shrl $10,%edi addl 48(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,76(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3345764771(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 
84(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 72(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 80(%esp),%ebx shrl $10,%edi addl 52(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,80(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3516065817(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 88(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 76(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 84(%esp),%ebx shrl $10,%edi addl 56(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,84(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3600352804(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 92(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl 80(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 88(%esp),%ebx shrl $10,%edi addl 60(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx movl %ebx,88(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 4094571909(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 32(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 84(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 92(%esp),%ebx shrl $10,%edi addl 64(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx movl %ebx,92(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 275423344(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 36(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 88(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 32(%esp),%ebx shrl $10,%edi addl 68(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,32(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 
28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 430227734(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 40(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 92(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 36(%esp),%ebx shrl $10,%edi addl 72(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,36(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 506948616(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 44(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 32(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 40(%esp),%ebx shrl $10,%edi addl 76(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,40(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 659060556(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 48(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 36(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 44(%esp),%ebx shrl $10,%edi addl 80(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,44(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 883997877(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 52(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 40(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 48(%esp),%ebx shrl $10,%edi addl 84(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,48(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 958139571(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 56(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 44(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx 
addl 52(%esp),%ebx shrl $10,%edi addl 88(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,52(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1322822218(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 60(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl 48(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 56(%esp),%ebx shrl $10,%edi addl 92(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx movl %ebx,56(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1537002063(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 64(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 52(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 60(%esp),%ebx shrl $10,%edi addl 32(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx movl %ebx,60(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 1747873779(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 68(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 56(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 64(%esp),%ebx shrl $10,%edi addl 36(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 20(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 24(%esp),%edi xorl %ecx,%edx movl %ebx,64(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx addl 28(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 4(%esp),%edi xorl %eax,%ecx movl %eax,(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 1955562222(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 72(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 12(%esp),%edx addl %ecx,%ebp movl 60(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 68(%esp),%ebx shrl $10,%edi addl 40(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 16(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 20(%esp),%edi xorl %esi,%edx movl %ebx,68(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,12(%esp) xorl %esi,%edx addl 24(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl (%esp),%edi xorl %ebp,%esi movl %ebp,28(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2024104815(%ebx,%edx,1),%edx 
xorl %ecx,%esi xorl %edi,%eax movl 76(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 8(%esp),%edx addl %esi,%eax movl 64(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 72(%esp),%ebx shrl $10,%edi addl 44(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 12(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 16(%esp),%edi xorl %ecx,%edx movl %ebx,72(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx addl 20(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 28(%esp),%edi xorl %eax,%ecx movl %eax,24(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2227730452(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 80(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 4(%esp),%edx addl %ecx,%ebp movl 68(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 76(%esp),%ebx shrl $10,%edi addl 48(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 8(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 12(%esp),%edi xorl %esi,%edx movl %ebx,76(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,4(%esp) xorl %esi,%edx addl 16(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 24(%esp),%edi xorl %ebp,%esi movl %ebp,20(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2361852424(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 84(%esp),%ecx rorl $2,%esi addl %edx,%eax addl (%esp),%edx addl %esi,%eax movl 72(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 80(%esp),%ebx shrl $10,%edi addl 52(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 4(%esp),%esi rorl $14,%edx addl %edi,%ebx movl 8(%esp),%edi xorl %ecx,%edx movl %ebx,80(%esp) xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx addl 12(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 20(%esp),%edi xorl %eax,%ecx movl %eax,16(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 2428436474(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 88(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 28(%esp),%edx addl %ecx,%ebp movl 76(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 84(%esp),%ebx shrl $10,%edi addl 56(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl (%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 4(%esp),%edi xorl %esi,%edx movl %ebx,84(%esp) xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,28(%esp) xorl %esi,%edx addl 8(%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 16(%esp),%edi xorl %ebp,%esi movl %ebp,12(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 2756734187(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax movl 92(%esp),%ecx rorl $2,%esi addl %edx,%eax addl 24(%esp),%edx addl %esi,%eax movl 80(%esp),%esi movl %ecx,%ebx rorl $11,%ecx movl %esi,%edi rorl $2,%esi xorl %ebx,%ecx shrl $3,%ebx rorl $7,%ecx xorl %edi,%esi xorl %ecx,%ebx rorl $17,%esi addl 88(%esp),%ebx shrl $10,%edi addl 60(%esp),%ebx movl %edx,%ecx xorl %esi,%edi movl 28(%esp),%esi rorl $14,%edx addl %edi,%ebx movl (%esp),%edi xorl %ecx,%edx xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) 
xorl %ecx,%edx addl 4(%esp),%ebx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%ebx rorl $9,%ecx movl %eax,%esi movl 12(%esp),%edi xorl %eax,%ecx movl %eax,8(%esp) xorl %edi,%eax rorl $11,%ecx andl %eax,%ebp leal 3204031479(%ebx,%edx,1),%edx xorl %esi,%ecx xorl %edi,%ebp movl 32(%esp),%esi rorl $2,%ecx addl %edx,%ebp addl 20(%esp),%edx addl %ecx,%ebp movl 84(%esp),%ecx movl %esi,%ebx rorl $11,%esi movl %ecx,%edi rorl $2,%ecx xorl %ebx,%esi shrl $3,%ebx rorl $7,%esi xorl %edi,%ecx xorl %esi,%ebx rorl $17,%ecx addl 92(%esp),%ebx shrl $10,%edi addl 64(%esp),%ebx movl %edx,%esi xorl %ecx,%edi movl 24(%esp),%ecx rorl $14,%edx addl %edi,%ebx movl 28(%esp),%edi xorl %esi,%edx xorl %edi,%ecx rorl $5,%edx andl %esi,%ecx movl %esi,20(%esp) xorl %esi,%edx addl (%esp),%ebx xorl %ecx,%edi rorl $6,%edx movl %ebp,%esi addl %edi,%ebx rorl $9,%esi movl %ebp,%ecx movl 8(%esp),%edi xorl %ebp,%esi movl %ebp,4(%esp) xorl %edi,%ebp rorl $11,%esi andl %ebp,%eax leal 3329325298(%ebx,%edx,1),%edx xorl %ecx,%esi xorl %edi,%eax rorl $2,%esi addl %edx,%eax addl 16(%esp),%edx addl %esi,%eax movl 96(%esp),%esi xorl %edi,%ebp movl 12(%esp),%ecx addl (%esi),%eax addl 4(%esi),%ebp addl 8(%esi),%edi addl 12(%esi),%ecx movl %eax,(%esi) movl %ebp,4(%esi) movl %edi,8(%esi) movl %ecx,12(%esi) movl %ebp,4(%esp) xorl %edi,%ebp movl %edi,8(%esp) movl %ecx,12(%esp) movl 20(%esp),%edi movl 24(%esp),%ebx movl 28(%esp),%ecx addl 16(%esi),%edx addl 20(%esi),%edi addl 24(%esi),%ebx addl 28(%esi),%ecx movl %edx,16(%esi) movl %edi,20(%esi) movl %ebx,24(%esi) movl %ecx,28(%esi) movl %edi,20(%esp) movl 100(%esp),%edi movl %ebx,24(%esp) movl %ecx,28(%esp) cmpl 104(%esp),%edi jb L006grand_loop movl 108(%esp),%esp popl %edi popl %esi popl %ebx popl %ebp ret .globl _sha256_block_data_order_ssse3 .private_extern _sha256_block_data_order_ssse3 .align 4 _sha256_block_data_order_ssse3: L_sha256_block_data_order_ssse3_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi movl 20(%esp),%esi movl 24(%esp),%edi movl 28(%esp),%eax movl %esp,%ebx call L007pic_point L007pic_point: popl %ebp leal LK256-L007pic_point(%ebp),%ebp subl $16,%esp andl $-64,%esp shll $6,%eax addl %edi,%eax movl %esi,(%esp) movl %edi,4(%esp) movl %eax,8(%esp) movl %ebx,12(%esp) leal -96(%esp),%esp movl (%esi),%eax movl 4(%esi),%ebx movl 8(%esi),%ecx movl 12(%esi),%edi movl %ebx,4(%esp) xorl %ecx,%ebx movl %ecx,8(%esp) movl %edi,12(%esp) movl 16(%esi),%edx movl 20(%esi),%edi movl 24(%esi),%ecx movl 28(%esi),%esi movl %edi,20(%esp) movl 100(%esp),%edi movl %ecx,24(%esp) movl %esi,28(%esp) movdqa 256(%ebp),%xmm7 jmp L008grand_ssse3 .align 4,0x90 L008grand_ssse3: movdqu (%edi),%xmm0 movdqu 16(%edi),%xmm1 movdqu 32(%edi),%xmm2 movdqu 48(%edi),%xmm3 addl $64,%edi .byte 102,15,56,0,199 movl %edi,100(%esp) .byte 102,15,56,0,207 movdqa (%ebp),%xmm4 .byte 102,15,56,0,215 movdqa 16(%ebp),%xmm5 paddd %xmm0,%xmm4 .byte 102,15,56,0,223 movdqa 32(%ebp),%xmm6 paddd %xmm1,%xmm5 movdqa 48(%ebp),%xmm7 movdqa %xmm4,32(%esp) paddd %xmm2,%xmm6 movdqa %xmm5,48(%esp) paddd %xmm3,%xmm7 movdqa %xmm6,64(%esp) movdqa %xmm7,80(%esp) jmp L009ssse3_00_47 .align 4,0x90 L009ssse3_00_47: addl $64,%ebp movl %edx,%ecx movdqa %xmm1,%xmm4 rorl $14,%edx movl 20(%esp),%esi movdqa %xmm3,%xmm7 xorl %ecx,%edx movl 24(%esp),%edi .byte 102,15,58,15,224,4 xorl %edi,%esi rorl $5,%edx andl %ecx,%esi .byte 102,15,58,15,250,4 movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi movdqa %xmm4,%xmm5 rorl $6,%edx movl %eax,%ecx movdqa %xmm4,%xmm6 addl %edi,%edx movl 4(%esp),%edi psrld $3,%xmm4 movl %eax,%esi rorl $9,%ecx paddd 
%xmm7,%xmm0 movl %eax,(%esp) xorl %eax,%ecx psrld $7,%xmm6 xorl %edi,%eax addl 28(%esp),%edx rorl $11,%ecx andl %eax,%ebx pshufd $250,%xmm3,%xmm7 xorl %esi,%ecx addl 32(%esp),%edx pslld $14,%xmm5 xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm4 addl %edx,%ebx addl 12(%esp),%edx psrld $11,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm5,%xmm4 movl 16(%esp),%esi xorl %ecx,%edx pslld $11,%xmm5 movl 20(%esp),%edi xorl %edi,%esi rorl $5,%edx pxor %xmm6,%xmm4 andl %ecx,%esi movl %ecx,12(%esp) movdqa %xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx pxor %xmm5,%xmm4 movl %ebx,%ecx addl %edi,%edx psrld $10,%xmm7 movl (%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm4,%xmm0 movl %ebx,28(%esp) xorl %ebx,%ecx psrlq $17,%xmm6 xorl %edi,%ebx addl 24(%esp),%edx rorl $11,%ecx pxor %xmm6,%xmm7 andl %ebx,%eax xorl %esi,%ecx psrlq $2,%xmm6 addl 36(%esp),%edx xorl %edi,%eax rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%eax addl 8(%esp),%edx pshufd $128,%xmm7,%xmm7 addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi psrldq $8,%xmm7 movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi paddd %xmm7,%xmm0 rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,24(%esp) pshufd $80,%xmm0,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx movdqa %xmm7,%xmm6 rorl $11,%ecx psrld $10,%xmm7 andl %eax,%ebx psrlq $17,%xmm6 xorl %esi,%ecx addl 40(%esp),%edx xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%ebx addl 4(%esp),%edx psrlq $2,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm6,%xmm7 movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi pshufd $8,%xmm7,%xmm7 xorl %edi,%esi rorl $5,%edx movdqa (%ebp),%xmm6 andl %ecx,%esi movl %ecx,4(%esp) pslldq $8,%xmm7 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm7,%xmm0 movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx paddd %xmm0,%xmm6 rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 44(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movdqa %xmm6,32(%esp) movl %edx,%ecx movdqa %xmm2,%xmm4 rorl $14,%edx movl 4(%esp),%esi movdqa %xmm0,%xmm7 xorl %ecx,%edx movl 8(%esp),%edi .byte 102,15,58,15,225,4 xorl %edi,%esi rorl $5,%edx andl %ecx,%esi .byte 102,15,58,15,251,4 movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi movdqa %xmm4,%xmm5 rorl $6,%edx movl %eax,%ecx movdqa %xmm4,%xmm6 addl %edi,%edx movl 20(%esp),%edi psrld $3,%xmm4 movl %eax,%esi rorl $9,%ecx paddd %xmm7,%xmm1 movl %eax,16(%esp) xorl %eax,%ecx psrld $7,%xmm6 xorl %edi,%eax addl 12(%esp),%edx rorl $11,%ecx andl %eax,%ebx pshufd $250,%xmm0,%xmm7 xorl %esi,%ecx addl 48(%esp),%edx pslld $14,%xmm5 xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm4 addl %edx,%ebx addl 28(%esp),%edx psrld $11,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm5,%xmm4 movl (%esp),%esi xorl %ecx,%edx pslld $11,%xmm5 movl 4(%esp),%edi xorl %edi,%esi rorl $5,%edx pxor %xmm6,%xmm4 andl %ecx,%esi movl %ecx,28(%esp) movdqa %xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx pxor %xmm5,%xmm4 movl %ebx,%ecx addl %edi,%edx psrld $10,%xmm7 movl 16(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm4,%xmm1 movl %ebx,12(%esp) xorl %ebx,%ecx psrlq $17,%xmm6 xorl %edi,%ebx addl 8(%esp),%edx rorl $11,%ecx pxor %xmm6,%xmm7 andl %ebx,%eax xorl %esi,%ecx psrlq $2,%xmm6 addl 52(%esp),%edx xorl %edi,%eax rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%eax addl 24(%esp),%edx pshufd 
$128,%xmm7,%xmm7 addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi psrldq $8,%xmm7 movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi paddd %xmm7,%xmm1 rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,8(%esp) pshufd $80,%xmm1,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx movdqa %xmm7,%xmm6 rorl $11,%ecx psrld $10,%xmm7 andl %eax,%ebx psrlq $17,%xmm6 xorl %esi,%ecx addl 56(%esp),%edx xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%ebx addl 20(%esp),%edx psrlq $2,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm6,%xmm7 movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi pshufd $8,%xmm7,%xmm7 xorl %edi,%esi rorl $5,%edx movdqa 16(%ebp),%xmm6 andl %ecx,%esi movl %ecx,20(%esp) pslldq $8,%xmm7 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm7,%xmm1 movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx paddd %xmm1,%xmm6 rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 60(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movdqa %xmm6,48(%esp) movl %edx,%ecx movdqa %xmm3,%xmm4 rorl $14,%edx movl 20(%esp),%esi movdqa %xmm1,%xmm7 xorl %ecx,%edx movl 24(%esp),%edi .byte 102,15,58,15,226,4 xorl %edi,%esi rorl $5,%edx andl %ecx,%esi .byte 102,15,58,15,248,4 movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi movdqa %xmm4,%xmm5 rorl $6,%edx movl %eax,%ecx movdqa %xmm4,%xmm6 addl %edi,%edx movl 4(%esp),%edi psrld $3,%xmm4 movl %eax,%esi rorl $9,%ecx paddd %xmm7,%xmm2 movl %eax,(%esp) xorl %eax,%ecx psrld $7,%xmm6 xorl %edi,%eax addl 28(%esp),%edx rorl $11,%ecx andl %eax,%ebx pshufd $250,%xmm1,%xmm7 xorl %esi,%ecx addl 64(%esp),%edx pslld $14,%xmm5 xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm4 addl %edx,%ebx addl 12(%esp),%edx psrld $11,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm5,%xmm4 movl 16(%esp),%esi xorl %ecx,%edx pslld $11,%xmm5 movl 20(%esp),%edi xorl %edi,%esi rorl $5,%edx pxor %xmm6,%xmm4 andl %ecx,%esi movl %ecx,12(%esp) movdqa %xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx pxor %xmm5,%xmm4 movl %ebx,%ecx addl %edi,%edx psrld $10,%xmm7 movl (%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm4,%xmm2 movl %ebx,28(%esp) xorl %ebx,%ecx psrlq $17,%xmm6 xorl %edi,%ebx addl 24(%esp),%edx rorl $11,%ecx pxor %xmm6,%xmm7 andl %ebx,%eax xorl %esi,%ecx psrlq $2,%xmm6 addl 68(%esp),%edx xorl %edi,%eax rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%eax addl 8(%esp),%edx pshufd $128,%xmm7,%xmm7 addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi psrldq $8,%xmm7 movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi paddd %xmm7,%xmm2 rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,24(%esp) pshufd $80,%xmm2,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx movdqa %xmm7,%xmm6 rorl $11,%ecx psrld $10,%xmm7 andl %eax,%ebx psrlq $17,%xmm6 xorl %esi,%ecx addl 72(%esp),%edx xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%ebx addl 4(%esp),%edx psrlq $2,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm6,%xmm7 movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi pshufd $8,%xmm7,%xmm7 xorl %edi,%esi rorl $5,%edx movdqa 32(%ebp),%xmm6 andl %ecx,%esi movl %ecx,4(%esp) pslldq $8,%xmm7 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 
24(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm7,%xmm2 movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx paddd %xmm2,%xmm6 rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 76(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movdqa %xmm6,64(%esp) movl %edx,%ecx movdqa %xmm0,%xmm4 rorl $14,%edx movl 4(%esp),%esi movdqa %xmm2,%xmm7 xorl %ecx,%edx movl 8(%esp),%edi .byte 102,15,58,15,227,4 xorl %edi,%esi rorl $5,%edx andl %ecx,%esi .byte 102,15,58,15,249,4 movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi movdqa %xmm4,%xmm5 rorl $6,%edx movl %eax,%ecx movdqa %xmm4,%xmm6 addl %edi,%edx movl 20(%esp),%edi psrld $3,%xmm4 movl %eax,%esi rorl $9,%ecx paddd %xmm7,%xmm3 movl %eax,16(%esp) xorl %eax,%ecx psrld $7,%xmm6 xorl %edi,%eax addl 12(%esp),%edx rorl $11,%ecx andl %eax,%ebx pshufd $250,%xmm2,%xmm7 xorl %esi,%ecx addl 80(%esp),%edx pslld $14,%xmm5 xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm4 addl %edx,%ebx addl 28(%esp),%edx psrld $11,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm5,%xmm4 movl (%esp),%esi xorl %ecx,%edx pslld $11,%xmm5 movl 4(%esp),%edi xorl %edi,%esi rorl $5,%edx pxor %xmm6,%xmm4 andl %ecx,%esi movl %ecx,28(%esp) movdqa %xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx pxor %xmm5,%xmm4 movl %ebx,%ecx addl %edi,%edx psrld $10,%xmm7 movl 16(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm4,%xmm3 movl %ebx,12(%esp) xorl %ebx,%ecx psrlq $17,%xmm6 xorl %edi,%ebx addl 8(%esp),%edx rorl $11,%ecx pxor %xmm6,%xmm7 andl %ebx,%eax xorl %esi,%ecx psrlq $2,%xmm6 addl 84(%esp),%edx xorl %edi,%eax rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%eax addl 24(%esp),%edx pshufd $128,%xmm7,%xmm7 addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi psrldq $8,%xmm7 movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi paddd %xmm7,%xmm3 rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,8(%esp) pshufd $80,%xmm3,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx movdqa %xmm7,%xmm6 rorl $11,%ecx psrld $10,%xmm7 andl %eax,%ebx psrlq $17,%xmm6 xorl %esi,%ecx addl 88(%esp),%edx xorl %edi,%ebx rorl $2,%ecx pxor %xmm6,%xmm7 addl %edx,%ebx addl 20(%esp),%edx psrlq $2,%xmm6 addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx pxor %xmm6,%xmm7 movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi pshufd $8,%xmm7,%xmm7 xorl %edi,%esi rorl $5,%edx movdqa 48(%ebp),%xmm6 andl %ecx,%esi movl %ecx,20(%esp) pslldq $8,%xmm7 xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi rorl $9,%ecx paddd %xmm7,%xmm3 movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx paddd %xmm3,%xmm6 rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 92(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movdqa %xmm6,80(%esp) cmpl $66051,64(%ebp) jne L009ssse3_00_47 movl %edx,%ecx rorl $14,%edx movl 20(%esp),%esi xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,(%esp) xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 32(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 16(%esp),%esi xorl %ecx,%edx movl 20(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl 
%ecx,12(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,28(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 36(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,24(%esp) xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 40(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 44(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 4(%esp),%esi xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 20(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,16(%esp) xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 48(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl (%esp),%esi xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,28(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,12(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 52(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,8(%esp) xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 56(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 60(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 20(%esp),%esi xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,(%esp) xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 64(%esp),%edx xorl 
%edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 16(%esp),%esi xorl %ecx,%edx movl 20(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,12(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,28(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 68(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,24(%esp) xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 72(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 76(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 4(%esp),%esi xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 20(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,16(%esp) xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 80(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl (%esp),%esi xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,28(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,12(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 84(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax movl %edx,%ecx rorl $14,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi rorl $9,%ecx movl %eax,8(%esp) xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx rorl $11,%ecx andl %eax,%ebx xorl %esi,%ecx addl 88(%esp),%edx xorl %edi,%ebx rorl $2,%ecx addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx movl %edx,%ecx rorl $14,%edx movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi rorl $5,%edx andl %ecx,%esi movl %ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi rorl $6,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi rorl $9,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx rorl $11,%ecx andl %ebx,%eax xorl %esi,%ecx addl 92(%esp),%edx xorl %edi,%eax rorl $2,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movl 96(%esp),%esi xorl %edi,%ebx movl 12(%esp),%ecx addl (%esi),%eax addl 4(%esi),%ebx addl 8(%esi),%edi addl 12(%esi),%ecx movl %eax,(%esi) movl %ebx,4(%esi) movl %edi,8(%esi) movl %ecx,12(%esi) movl 
%ebx,4(%esp) xorl %edi,%ebx movl %edi,8(%esp) movl %ecx,12(%esp) movl 20(%esp),%edi movl 24(%esp),%ecx addl 16(%esi),%edx addl 20(%esi),%edi addl 24(%esi),%ecx movl %edx,16(%esi) movl %edi,20(%esi) movl %edi,20(%esp) movl 28(%esp),%edi movl %ecx,24(%esi) addl 28(%esi),%edi movl %ecx,24(%esp) movl %edi,28(%esi) movl %edi,28(%esp) movl 100(%esp),%edi movdqa 64(%ebp),%xmm7 subl $192,%ebp cmpl 104(%esp),%edi jb L008grand_ssse3 movl 108(%esp),%esp popl %edi popl %esi popl %ebx popl %ebp ret .globl _sha256_block_data_order_avx .private_extern _sha256_block_data_order_avx .align 4 _sha256_block_data_order_avx: L_sha256_block_data_order_avx_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi movl 20(%esp),%esi movl 24(%esp),%edi movl 28(%esp),%eax movl %esp,%ebx call L010pic_point L010pic_point: popl %ebp leal LK256-L010pic_point(%ebp),%ebp subl $16,%esp andl $-64,%esp shll $6,%eax addl %edi,%eax movl %esi,(%esp) movl %edi,4(%esp) movl %eax,8(%esp) movl %ebx,12(%esp) leal -96(%esp),%esp vzeroall movl (%esi),%eax movl 4(%esi),%ebx movl 8(%esi),%ecx movl 12(%esi),%edi movl %ebx,4(%esp) xorl %ecx,%ebx movl %ecx,8(%esp) movl %edi,12(%esp) movl 16(%esi),%edx movl 20(%esi),%edi movl 24(%esi),%ecx movl 28(%esi),%esi movl %edi,20(%esp) movl 100(%esp),%edi movl %ecx,24(%esp) movl %esi,28(%esp) vmovdqa 256(%ebp),%xmm7 jmp L011grand_avx .align 5,0x90 L011grand_avx: vmovdqu (%edi),%xmm0 vmovdqu 16(%edi),%xmm1 vmovdqu 32(%edi),%xmm2 vmovdqu 48(%edi),%xmm3 addl $64,%edi vpshufb %xmm7,%xmm0,%xmm0 movl %edi,100(%esp) vpshufb %xmm7,%xmm1,%xmm1 vpshufb %xmm7,%xmm2,%xmm2 vpaddd (%ebp),%xmm0,%xmm4 vpshufb %xmm7,%xmm3,%xmm3 vpaddd 16(%ebp),%xmm1,%xmm5 vpaddd 32(%ebp),%xmm2,%xmm6 vpaddd 48(%ebp),%xmm3,%xmm7 vmovdqa %xmm4,32(%esp) vmovdqa %xmm5,48(%esp) vmovdqa %xmm6,64(%esp) vmovdqa %xmm7,80(%esp) jmp L012avx_00_47 .align 4,0x90 L012avx_00_47: addl $64,%ebp vpalignr $4,%xmm0,%xmm1,%xmm4 movl %edx,%ecx shrdl $14,%edx,%edx movl 20(%esp),%esi vpalignr $4,%xmm2,%xmm3,%xmm7 xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi vpsrld $7,%xmm4,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,16(%esp) vpaddd %xmm7,%xmm0,%xmm0 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrld $3,%xmm4,%xmm7 movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi vpslld $14,%xmm4,%xmm5 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,(%esp) vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx vpshufd $250,%xmm3,%xmm7 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpsrld $11,%xmm6,%xmm6 addl 32(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpxor %xmm5,%xmm4,%xmm4 addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx vpslld $11,%xmm5,%xmm5 movl %edx,%ecx shrdl $14,%edx,%edx movl 16(%esp),%esi vpxor %xmm6,%xmm4,%xmm4 xorl %ecx,%edx movl 20(%esp),%edi xorl %edi,%esi vpsrld $10,%xmm7,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,12(%esp) vpxor %xmm5,%xmm4,%xmm4 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi vpaddd %xmm4,%xmm0,%xmm0 movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,28(%esp) vpxor %xmm5,%xmm6,%xmm6 xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx vpsrlq $19,%xmm7,%xmm7 shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx vpxor %xmm7,%xmm6,%xmm6 addl 36(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx vpshufd $132,%xmm6,%xmm7 addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax vpsrldq $8,%xmm7,%xmm7 movl %edx,%ecx shrdl $14,%edx,%edx movl 12(%esp),%esi vpaddd %xmm7,%xmm0,%xmm0 xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi vpshufd $80,%xmm0,%xmm7 shrdl $5,%edx,%edx andl 
%ecx,%esi movl %ecx,8(%esp) vpsrld $10,%xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi vpxor %xmm5,%xmm6,%xmm6 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,24(%esp) vpsrlq $19,%xmm7,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx vpxor %xmm7,%xmm6,%xmm6 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpshufd $232,%xmm6,%xmm7 addl 40(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpslldq $8,%xmm7,%xmm7 addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx vpaddd %xmm7,%xmm0,%xmm0 movl %edx,%ecx shrdl $14,%edx,%edx movl 8(%esp),%esi vpaddd (%ebp),%xmm0,%xmm6 xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 44(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax vmovdqa %xmm6,32(%esp) vpalignr $4,%xmm1,%xmm2,%xmm4 movl %edx,%ecx shrdl $14,%edx,%edx movl 4(%esp),%esi vpalignr $4,%xmm3,%xmm0,%xmm7 xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi vpsrld $7,%xmm4,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,(%esp) vpaddd %xmm7,%xmm1,%xmm1 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrld $3,%xmm4,%xmm7 movl %eax,%ecx addl %edi,%edx movl 20(%esp),%edi vpslld $14,%xmm4,%xmm5 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,16(%esp) vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx vpshufd $250,%xmm0,%xmm7 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpsrld $11,%xmm6,%xmm6 addl 48(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpxor %xmm5,%xmm4,%xmm4 addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx vpslld $11,%xmm5,%xmm5 movl %edx,%ecx shrdl $14,%edx,%edx movl (%esp),%esi vpxor %xmm6,%xmm4,%xmm4 xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi vpsrld $10,%xmm7,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,28(%esp) vpxor %xmm5,%xmm4,%xmm4 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi vpaddd %xmm4,%xmm1,%xmm1 movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,12(%esp) vpxor %xmm5,%xmm6,%xmm6 xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx vpsrlq $19,%xmm7,%xmm7 shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx vpxor %xmm7,%xmm6,%xmm6 addl 52(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx vpshufd $132,%xmm6,%xmm7 addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax vpsrldq $8,%xmm7,%xmm7 movl %edx,%ecx shrdl $14,%edx,%edx movl 28(%esp),%esi vpaddd %xmm7,%xmm1,%xmm1 xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi vpshufd $80,%xmm1,%xmm7 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,24(%esp) vpsrld $10,%xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi vpxor %xmm5,%xmm6,%xmm6 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,8(%esp) vpsrlq $19,%xmm7,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx vpxor %xmm7,%xmm6,%xmm6 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpshufd $232,%xmm6,%xmm7 addl 56(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpslldq $8,%xmm7,%xmm7 addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx vpaddd %xmm7,%xmm1,%xmm1 movl %edx,%ecx shrdl $14,%edx,%edx movl 24(%esp),%esi vpaddd 16(%ebp),%xmm1,%xmm6 xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl 
%ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 60(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax vmovdqa %xmm6,48(%esp) vpalignr $4,%xmm2,%xmm3,%xmm4 movl %edx,%ecx shrdl $14,%edx,%edx movl 20(%esp),%esi vpalignr $4,%xmm0,%xmm1,%xmm7 xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi vpsrld $7,%xmm4,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,16(%esp) vpaddd %xmm7,%xmm2,%xmm2 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrld $3,%xmm4,%xmm7 movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi vpslld $14,%xmm4,%xmm5 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,(%esp) vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx vpshufd $250,%xmm1,%xmm7 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpsrld $11,%xmm6,%xmm6 addl 64(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpxor %xmm5,%xmm4,%xmm4 addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx vpslld $11,%xmm5,%xmm5 movl %edx,%ecx shrdl $14,%edx,%edx movl 16(%esp),%esi vpxor %xmm6,%xmm4,%xmm4 xorl %ecx,%edx movl 20(%esp),%edi xorl %edi,%esi vpsrld $10,%xmm7,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,12(%esp) vpxor %xmm5,%xmm4,%xmm4 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi vpaddd %xmm4,%xmm2,%xmm2 movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,28(%esp) vpxor %xmm5,%xmm6,%xmm6 xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx vpsrlq $19,%xmm7,%xmm7 shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx vpxor %xmm7,%xmm6,%xmm6 addl 68(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx vpshufd $132,%xmm6,%xmm7 addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax vpsrldq $8,%xmm7,%xmm7 movl %edx,%ecx shrdl $14,%edx,%edx movl 12(%esp),%esi vpaddd %xmm7,%xmm2,%xmm2 xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi vpshufd $80,%xmm2,%xmm7 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,8(%esp) vpsrld $10,%xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi vpxor %xmm5,%xmm6,%xmm6 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,24(%esp) vpsrlq $19,%xmm7,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx vpxor %xmm7,%xmm6,%xmm6 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpshufd $232,%xmm6,%xmm7 addl 72(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpslldq $8,%xmm7,%xmm7 addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx vpaddd %xmm7,%xmm2,%xmm2 movl %edx,%ecx shrdl $14,%edx,%edx movl 8(%esp),%esi vpaddd 32(%ebp),%xmm2,%xmm6 xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 76(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax vmovdqa %xmm6,64(%esp) vpalignr $4,%xmm3,%xmm0,%xmm4 movl %edx,%ecx shrdl $14,%edx,%edx movl 4(%esp),%esi vpalignr $4,%xmm1,%xmm2,%xmm7 xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi vpsrld $7,%xmm4,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,(%esp) vpaddd %xmm7,%xmm3,%xmm3 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrld $3,%xmm4,%xmm7 movl %eax,%ecx addl 
%edi,%edx movl 20(%esp),%edi vpslld $14,%xmm4,%xmm5 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,16(%esp) vpxor %xmm6,%xmm7,%xmm4 xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx vpshufd $250,%xmm2,%xmm7 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpsrld $11,%xmm6,%xmm6 addl 80(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpxor %xmm5,%xmm4,%xmm4 addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx vpslld $11,%xmm5,%xmm5 movl %edx,%ecx shrdl $14,%edx,%edx movl (%esp),%esi vpxor %xmm6,%xmm4,%xmm4 xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi vpsrld $10,%xmm7,%xmm6 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,28(%esp) vpxor %xmm5,%xmm4,%xmm4 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi vpaddd %xmm4,%xmm3,%xmm3 movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,12(%esp) vpxor %xmm5,%xmm6,%xmm6 xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx vpsrlq $19,%xmm7,%xmm7 shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx vpxor %xmm7,%xmm6,%xmm6 addl 84(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx vpshufd $132,%xmm6,%xmm7 addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax vpsrldq $8,%xmm7,%xmm7 movl %edx,%ecx shrdl $14,%edx,%edx movl 28(%esp),%esi vpaddd %xmm7,%xmm3,%xmm3 xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi vpshufd $80,%xmm3,%xmm7 shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,24(%esp) vpsrld $10,%xmm7,%xmm6 xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx vpsrlq $17,%xmm7,%xmm5 movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi vpxor %xmm5,%xmm6,%xmm6 movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,8(%esp) vpsrlq $19,%xmm7,%xmm7 xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx vpxor %xmm7,%xmm6,%xmm6 shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx vpshufd $232,%xmm6,%xmm7 addl 88(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx vpslldq $8,%xmm7,%xmm7 addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx vpaddd %xmm7,%xmm3,%xmm3 movl %edx,%ecx shrdl $14,%edx,%edx movl 24(%esp),%esi vpaddd 48(%ebp),%xmm3,%xmm6 xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 92(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax vmovdqa %xmm6,80(%esp) cmpl $66051,64(%ebp) jne L012avx_00_47 movl %edx,%ecx shrdl $14,%edx,%edx movl 20(%esp),%esi xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,(%esp) xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 32(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 16(%esp),%esi xorl %ecx,%edx movl 20(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,12(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,28(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 36(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 
12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,24(%esp) xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 40(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 44(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 4(%esp),%esi xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 20(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,16(%esp) xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 48(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl (%esp),%esi xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,28(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,12(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 52(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,8(%esp) xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 56(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 60(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 20(%esp),%esi xorl %ecx,%edx movl 24(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,16(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 4(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,(%esp) xorl %eax,%ecx xorl %edi,%eax addl 28(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 64(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 12(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 16(%esp),%esi xorl 
%ecx,%edx movl 20(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,12(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl (%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,28(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 24(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 68(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 8(%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 12(%esp),%esi xorl %ecx,%edx movl 16(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,8(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 28(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,24(%esp) xorl %eax,%ecx xorl %edi,%eax addl 20(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 72(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 4(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 8(%esp),%esi xorl %ecx,%edx movl 12(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,4(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 24(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,20(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 16(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 76(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl (%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 4(%esp),%esi xorl %ecx,%edx movl 8(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 20(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,16(%esp) xorl %eax,%ecx xorl %edi,%eax addl 12(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 80(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 28(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl (%esp),%esi xorl %ecx,%edx movl 4(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,28(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 16(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,12(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl 8(%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 84(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 24(%esp),%edx addl %ecx,%eax movl %edx,%ecx shrdl $14,%edx,%edx movl 28(%esp),%esi xorl %ecx,%edx movl (%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,24(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %eax,%ecx addl %edi,%edx movl 12(%esp),%edi movl %eax,%esi shrdl $9,%ecx,%ecx movl %eax,8(%esp) xorl %eax,%ecx xorl %edi,%eax addl 4(%esp),%edx shrdl $11,%ecx,%ecx andl %eax,%ebx xorl %esi,%ecx addl 88(%esp),%edx xorl %edi,%ebx shrdl $2,%ecx,%ecx addl %edx,%ebx addl 20(%esp),%edx addl %ecx,%ebx movl %edx,%ecx shrdl $14,%edx,%edx movl 24(%esp),%esi xorl %ecx,%edx movl 28(%esp),%edi xorl %edi,%esi shrdl $5,%edx,%edx andl %ecx,%esi movl %ecx,20(%esp) xorl %ecx,%edx xorl %esi,%edi shrdl $6,%edx,%edx movl %ebx,%ecx addl %edi,%edx movl 8(%esp),%edi movl %ebx,%esi shrdl $9,%ecx,%ecx movl %ebx,4(%esp) xorl %ebx,%ecx xorl %edi,%ebx addl (%esp),%edx shrdl $11,%ecx,%ecx andl %ebx,%eax xorl %esi,%ecx addl 92(%esp),%edx xorl %edi,%eax shrdl $2,%ecx,%ecx addl %edx,%eax addl 16(%esp),%edx addl %ecx,%eax movl 96(%esp),%esi xorl %edi,%ebx movl 12(%esp),%ecx addl (%esi),%eax addl 
4(%esi),%ebx addl 8(%esi),%edi addl 12(%esi),%ecx movl %eax,(%esi) movl %ebx,4(%esi) movl %edi,8(%esi) movl %ecx,12(%esi) movl %ebx,4(%esp) xorl %edi,%ebx movl %edi,8(%esp) movl %ecx,12(%esp) movl 20(%esp),%edi movl 24(%esp),%ecx addl 16(%esi),%edx addl 20(%esi),%edi addl 24(%esi),%ecx movl %edx,16(%esi) movl %edi,20(%esi) movl %edi,20(%esp) movl 28(%esp),%edi movl %ecx,24(%esi) addl 28(%esi),%edi movl %ecx,24(%esp) movl %edi,28(%esi) movl %edi,28(%esp) movl 100(%esp),%edi vmovdqa 64(%ebp),%xmm7 subl $192,%ebp cmpl 104(%esp),%edi jb L011grand_avx movl 108(%esp),%esp vzeroall popl %edi popl %esi popl %ebx popl %ebp ret #endif // !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__)
marvin-hansen/iggy-streaming-system
68,212
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86/crypto/fipsmodule/sha1-586.S
// This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #include <openssl/asm_base.h> #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__) .text .globl _sha1_block_data_order_nohw .private_extern _sha1_block_data_order_nohw .align 4 _sha1_block_data_order_nohw: L_sha1_block_data_order_nohw_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi movl 20(%esp),%ebp movl 24(%esp),%esi movl 28(%esp),%eax subl $76,%esp shll $6,%eax addl %esi,%eax movl %eax,104(%esp) movl 16(%ebp),%edi jmp L000loop .align 4,0x90 L000loop: movl (%esi),%eax movl 4(%esi),%ebx movl 8(%esi),%ecx movl 12(%esi),%edx bswap %eax bswap %ebx bswap %ecx bswap %edx movl %eax,(%esp) movl %ebx,4(%esp) movl %ecx,8(%esp) movl %edx,12(%esp) movl 16(%esi),%eax movl 20(%esi),%ebx movl 24(%esi),%ecx movl 28(%esi),%edx bswap %eax bswap %ebx bswap %ecx bswap %edx movl %eax,16(%esp) movl %ebx,20(%esp) movl %ecx,24(%esp) movl %edx,28(%esp) movl 32(%esi),%eax movl 36(%esi),%ebx movl 40(%esi),%ecx movl 44(%esi),%edx bswap %eax bswap %ebx bswap %ecx bswap %edx movl %eax,32(%esp) movl %ebx,36(%esp) movl %ecx,40(%esp) movl %edx,44(%esp) movl 48(%esi),%eax movl 52(%esi),%ebx movl 56(%esi),%ecx movl 60(%esi),%edx bswap %eax bswap %ebx bswap %ecx bswap %edx movl %eax,48(%esp) movl %ebx,52(%esp) movl %ecx,56(%esp) movl %edx,60(%esp) movl %esi,100(%esp) movl (%ebp),%eax movl 4(%ebp),%ebx movl 8(%ebp),%ecx movl 12(%ebp),%edx # 00_15 0 movl %ecx,%esi movl %eax,%ebp roll $5,%ebp xorl %edx,%esi addl %edi,%ebp movl (%esp),%edi andl %ebx,%esi rorl $2,%ebx xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp # 00_15 1 movl %ebx,%edi movl %ebp,%esi roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp movl 4(%esp),%edx andl %eax,%edi rorl $2,%eax xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp # 00_15 2 movl %eax,%edx movl %ebp,%edi roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp movl 8(%esp),%ecx andl %esi,%edx rorl $2,%esi xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp # 00_15 3 movl %esi,%ecx movl %ebp,%edx roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp movl 12(%esp),%ebx andl %edi,%ecx rorl $2,%edi xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp addl %ecx,%ebp # 00_15 4 movl %edi,%ebx movl %ebp,%ecx roll $5,%ebp xorl %esi,%ebx addl %eax,%ebp movl 16(%esp),%eax andl %edx,%ebx rorl $2,%edx xorl %esi,%ebx leal 1518500249(%ebp,%eax,1),%ebp addl %ebx,%ebp # 00_15 5 movl %edx,%eax movl %ebp,%ebx roll $5,%ebp xorl %edi,%eax addl %esi,%ebp movl 20(%esp),%esi andl %ecx,%eax rorl $2,%ecx xorl %edi,%eax leal 1518500249(%ebp,%esi,1),%ebp addl %eax,%ebp # 00_15 6 movl %ecx,%esi movl %ebp,%eax roll $5,%ebp xorl %edx,%esi addl %edi,%ebp movl 24(%esp),%edi andl %ebx,%esi rorl $2,%ebx xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp # 00_15 7 movl %ebx,%edi movl %ebp,%esi roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp movl 28(%esp),%edx andl %eax,%edi rorl $2,%eax xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp # 00_15 8 movl %eax,%edx movl %ebp,%edi roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp movl 32(%esp),%ecx andl %esi,%edx rorl $2,%esi xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp # 00_15 9 movl %esi,%ecx movl %ebp,%edx roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp movl 36(%esp),%ebx andl %edi,%ecx rorl $2,%edi xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp addl %ecx,%ebp # 00_15 10 movl %edi,%ebx movl %ebp,%ecx roll $5,%ebp xorl %esi,%ebx addl %eax,%ebp movl 40(%esp),%eax andl %edx,%ebx rorl $2,%edx xorl %esi,%ebx leal 
1518500249(%ebp,%eax,1),%ebp addl %ebx,%ebp # 00_15 11 movl %edx,%eax movl %ebp,%ebx roll $5,%ebp xorl %edi,%eax addl %esi,%ebp movl 44(%esp),%esi andl %ecx,%eax rorl $2,%ecx xorl %edi,%eax leal 1518500249(%ebp,%esi,1),%ebp addl %eax,%ebp # 00_15 12 movl %ecx,%esi movl %ebp,%eax roll $5,%ebp xorl %edx,%esi addl %edi,%ebp movl 48(%esp),%edi andl %ebx,%esi rorl $2,%ebx xorl %edx,%esi leal 1518500249(%ebp,%edi,1),%ebp addl %esi,%ebp # 00_15 13 movl %ebx,%edi movl %ebp,%esi roll $5,%ebp xorl %ecx,%edi addl %edx,%ebp movl 52(%esp),%edx andl %eax,%edi rorl $2,%eax xorl %ecx,%edi leal 1518500249(%ebp,%edx,1),%ebp addl %edi,%ebp # 00_15 14 movl %eax,%edx movl %ebp,%edi roll $5,%ebp xorl %ebx,%edx addl %ecx,%ebp movl 56(%esp),%ecx andl %esi,%edx rorl $2,%esi xorl %ebx,%edx leal 1518500249(%ebp,%ecx,1),%ebp addl %edx,%ebp # 00_15 15 movl %esi,%ecx movl %ebp,%edx roll $5,%ebp xorl %eax,%ecx addl %ebx,%ebp movl 60(%esp),%ebx andl %edi,%ecx rorl $2,%edi xorl %eax,%ecx leal 1518500249(%ebp,%ebx,1),%ebp movl (%esp),%ebx addl %ebp,%ecx # 16_19 16 movl %edi,%ebp xorl 8(%esp),%ebx xorl %esi,%ebp xorl 32(%esp),%ebx andl %edx,%ebp xorl 52(%esp),%ebx roll $1,%ebx xorl %esi,%ebp addl %ebp,%eax movl %ecx,%ebp rorl $2,%edx movl %ebx,(%esp) roll $5,%ebp leal 1518500249(%ebx,%eax,1),%ebx movl 4(%esp),%eax addl %ebp,%ebx # 16_19 17 movl %edx,%ebp xorl 12(%esp),%eax xorl %edi,%ebp xorl 36(%esp),%eax andl %ecx,%ebp xorl 56(%esp),%eax roll $1,%eax xorl %edi,%ebp addl %ebp,%esi movl %ebx,%ebp rorl $2,%ecx movl %eax,4(%esp) roll $5,%ebp leal 1518500249(%eax,%esi,1),%eax movl 8(%esp),%esi addl %ebp,%eax # 16_19 18 movl %ecx,%ebp xorl 16(%esp),%esi xorl %edx,%ebp xorl 40(%esp),%esi andl %ebx,%ebp xorl 60(%esp),%esi roll $1,%esi xorl %edx,%ebp addl %ebp,%edi movl %eax,%ebp rorl $2,%ebx movl %esi,8(%esp) roll $5,%ebp leal 1518500249(%esi,%edi,1),%esi movl 12(%esp),%edi addl %ebp,%esi # 16_19 19 movl %ebx,%ebp xorl 20(%esp),%edi xorl %ecx,%ebp xorl 44(%esp),%edi andl %eax,%ebp xorl (%esp),%edi roll $1,%edi xorl %ecx,%ebp addl %ebp,%edx movl %esi,%ebp rorl $2,%eax movl %edi,12(%esp) roll $5,%ebp leal 1518500249(%edi,%edx,1),%edi movl 16(%esp),%edx addl %ebp,%edi # 20_39 20 movl %esi,%ebp xorl 24(%esp),%edx xorl %eax,%ebp xorl 48(%esp),%edx xorl %ebx,%ebp xorl 4(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,16(%esp) leal 1859775393(%edx,%ecx,1),%edx movl 20(%esp),%ecx addl %ebp,%edx # 20_39 21 movl %edi,%ebp xorl 28(%esp),%ecx xorl %esi,%ebp xorl 52(%esp),%ecx xorl %eax,%ebp xorl 8(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,20(%esp) leal 1859775393(%ecx,%ebx,1),%ecx movl 24(%esp),%ebx addl %ebp,%ecx # 20_39 22 movl %edx,%ebp xorl 32(%esp),%ebx xorl %edi,%ebp xorl 56(%esp),%ebx xorl %esi,%ebp xorl 12(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,24(%esp) leal 1859775393(%ebx,%eax,1),%ebx movl 28(%esp),%eax addl %ebp,%ebx # 20_39 23 movl %ecx,%ebp xorl 36(%esp),%eax xorl %edx,%ebp xorl 60(%esp),%eax xorl %edi,%ebp xorl 16(%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp movl %eax,28(%esp) leal 1859775393(%eax,%esi,1),%eax movl 32(%esp),%esi addl %ebp,%eax # 20_39 24 movl %ebx,%ebp xorl 40(%esp),%esi xorl %ecx,%ebp xorl (%esp),%esi xorl %edx,%ebp xorl 20(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,32(%esp) leal 1859775393(%esi,%edi,1),%esi movl 36(%esp),%edi addl %ebp,%esi # 20_39 25 movl %eax,%ebp xorl 44(%esp),%edi xorl 
%ebx,%ebp xorl 4(%esp),%edi xorl %ecx,%ebp xorl 24(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp movl %edi,36(%esp) leal 1859775393(%edi,%edx,1),%edi movl 40(%esp),%edx addl %ebp,%edi # 20_39 26 movl %esi,%ebp xorl 48(%esp),%edx xorl %eax,%ebp xorl 8(%esp),%edx xorl %ebx,%ebp xorl 28(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,40(%esp) leal 1859775393(%edx,%ecx,1),%edx movl 44(%esp),%ecx addl %ebp,%edx # 20_39 27 movl %edi,%ebp xorl 52(%esp),%ecx xorl %esi,%ebp xorl 12(%esp),%ecx xorl %eax,%ebp xorl 32(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,44(%esp) leal 1859775393(%ecx,%ebx,1),%ecx movl 48(%esp),%ebx addl %ebp,%ecx # 20_39 28 movl %edx,%ebp xorl 56(%esp),%ebx xorl %edi,%ebp xorl 16(%esp),%ebx xorl %esi,%ebp xorl 36(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,48(%esp) leal 1859775393(%ebx,%eax,1),%ebx movl 52(%esp),%eax addl %ebp,%ebx # 20_39 29 movl %ecx,%ebp xorl 60(%esp),%eax xorl %edx,%ebp xorl 20(%esp),%eax xorl %edi,%ebp xorl 40(%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp movl %eax,52(%esp) leal 1859775393(%eax,%esi,1),%eax movl 56(%esp),%esi addl %ebp,%eax # 20_39 30 movl %ebx,%ebp xorl (%esp),%esi xorl %ecx,%ebp xorl 24(%esp),%esi xorl %edx,%ebp xorl 44(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,56(%esp) leal 1859775393(%esi,%edi,1),%esi movl 60(%esp),%edi addl %ebp,%esi # 20_39 31 movl %eax,%ebp xorl 4(%esp),%edi xorl %ebx,%ebp xorl 28(%esp),%edi xorl %ecx,%ebp xorl 48(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp movl %edi,60(%esp) leal 1859775393(%edi,%edx,1),%edi movl (%esp),%edx addl %ebp,%edi # 20_39 32 movl %esi,%ebp xorl 8(%esp),%edx xorl %eax,%ebp xorl 32(%esp),%edx xorl %ebx,%ebp xorl 52(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,(%esp) leal 1859775393(%edx,%ecx,1),%edx movl 4(%esp),%ecx addl %ebp,%edx # 20_39 33 movl %edi,%ebp xorl 12(%esp),%ecx xorl %esi,%ebp xorl 36(%esp),%ecx xorl %eax,%ebp xorl 56(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,4(%esp) leal 1859775393(%ecx,%ebx,1),%ecx movl 8(%esp),%ebx addl %ebp,%ecx # 20_39 34 movl %edx,%ebp xorl 16(%esp),%ebx xorl %edi,%ebp xorl 40(%esp),%ebx xorl %esi,%ebp xorl 60(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,8(%esp) leal 1859775393(%ebx,%eax,1),%ebx movl 12(%esp),%eax addl %ebp,%ebx # 20_39 35 movl %ecx,%ebp xorl 20(%esp),%eax xorl %edx,%ebp xorl 44(%esp),%eax xorl %edi,%ebp xorl (%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp movl %eax,12(%esp) leal 1859775393(%eax,%esi,1),%eax movl 16(%esp),%esi addl %ebp,%eax # 20_39 36 movl %ebx,%ebp xorl 24(%esp),%esi xorl %ecx,%ebp xorl 48(%esp),%esi xorl %edx,%ebp xorl 4(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,16(%esp) leal 1859775393(%esi,%edi,1),%esi movl 20(%esp),%edi addl %ebp,%esi # 20_39 37 movl %eax,%ebp xorl 28(%esp),%edi xorl %ebx,%ebp xorl 52(%esp),%edi xorl %ecx,%ebp xorl 8(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp movl %edi,20(%esp) leal 1859775393(%edi,%edx,1),%edi movl 24(%esp),%edx addl %ebp,%edi # 20_39 38 movl %esi,%ebp xorl 32(%esp),%edx xorl %eax,%ebp xorl 56(%esp),%edx xorl %ebx,%ebp xorl 12(%esp),%edx roll $1,%edx 
addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,24(%esp) leal 1859775393(%edx,%ecx,1),%edx movl 28(%esp),%ecx addl %ebp,%edx # 20_39 39 movl %edi,%ebp xorl 36(%esp),%ecx xorl %esi,%ebp xorl 60(%esp),%ecx xorl %eax,%ebp xorl 16(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,28(%esp) leal 1859775393(%ecx,%ebx,1),%ecx movl 32(%esp),%ebx addl %ebp,%ecx # 40_59 40 movl %edi,%ebp xorl 40(%esp),%ebx xorl %esi,%ebp xorl (%esp),%ebx andl %edx,%ebp xorl 20(%esp),%ebx roll $1,%ebx addl %eax,%ebp rorl $2,%edx movl %ecx,%eax roll $5,%eax movl %ebx,32(%esp) leal 2400959708(%ebx,%ebp,1),%ebx movl %edi,%ebp addl %eax,%ebx andl %esi,%ebp movl 36(%esp),%eax addl %ebp,%ebx # 40_59 41 movl %edx,%ebp xorl 44(%esp),%eax xorl %edi,%ebp xorl 4(%esp),%eax andl %ecx,%ebp xorl 24(%esp),%eax roll $1,%eax addl %esi,%ebp rorl $2,%ecx movl %ebx,%esi roll $5,%esi movl %eax,36(%esp) leal 2400959708(%eax,%ebp,1),%eax movl %edx,%ebp addl %esi,%eax andl %edi,%ebp movl 40(%esp),%esi addl %ebp,%eax # 40_59 42 movl %ecx,%ebp xorl 48(%esp),%esi xorl %edx,%ebp xorl 8(%esp),%esi andl %ebx,%ebp xorl 28(%esp),%esi roll $1,%esi addl %edi,%ebp rorl $2,%ebx movl %eax,%edi roll $5,%edi movl %esi,40(%esp) leal 2400959708(%esi,%ebp,1),%esi movl %ecx,%ebp addl %edi,%esi andl %edx,%ebp movl 44(%esp),%edi addl %ebp,%esi # 40_59 43 movl %ebx,%ebp xorl 52(%esp),%edi xorl %ecx,%ebp xorl 12(%esp),%edi andl %eax,%ebp xorl 32(%esp),%edi roll $1,%edi addl %edx,%ebp rorl $2,%eax movl %esi,%edx roll $5,%edx movl %edi,44(%esp) leal 2400959708(%edi,%ebp,1),%edi movl %ebx,%ebp addl %edx,%edi andl %ecx,%ebp movl 48(%esp),%edx addl %ebp,%edi # 40_59 44 movl %eax,%ebp xorl 56(%esp),%edx xorl %ebx,%ebp xorl 16(%esp),%edx andl %esi,%ebp xorl 36(%esp),%edx roll $1,%edx addl %ecx,%ebp rorl $2,%esi movl %edi,%ecx roll $5,%ecx movl %edx,48(%esp) leal 2400959708(%edx,%ebp,1),%edx movl %eax,%ebp addl %ecx,%edx andl %ebx,%ebp movl 52(%esp),%ecx addl %ebp,%edx # 40_59 45 movl %esi,%ebp xorl 60(%esp),%ecx xorl %eax,%ebp xorl 20(%esp),%ecx andl %edi,%ebp xorl 40(%esp),%ecx roll $1,%ecx addl %ebx,%ebp rorl $2,%edi movl %edx,%ebx roll $5,%ebx movl %ecx,52(%esp) leal 2400959708(%ecx,%ebp,1),%ecx movl %esi,%ebp addl %ebx,%ecx andl %eax,%ebp movl 56(%esp),%ebx addl %ebp,%ecx # 40_59 46 movl %edi,%ebp xorl (%esp),%ebx xorl %esi,%ebp xorl 24(%esp),%ebx andl %edx,%ebp xorl 44(%esp),%ebx roll $1,%ebx addl %eax,%ebp rorl $2,%edx movl %ecx,%eax roll $5,%eax movl %ebx,56(%esp) leal 2400959708(%ebx,%ebp,1),%ebx movl %edi,%ebp addl %eax,%ebx andl %esi,%ebp movl 60(%esp),%eax addl %ebp,%ebx # 40_59 47 movl %edx,%ebp xorl 4(%esp),%eax xorl %edi,%ebp xorl 28(%esp),%eax andl %ecx,%ebp xorl 48(%esp),%eax roll $1,%eax addl %esi,%ebp rorl $2,%ecx movl %ebx,%esi roll $5,%esi movl %eax,60(%esp) leal 2400959708(%eax,%ebp,1),%eax movl %edx,%ebp addl %esi,%eax andl %edi,%ebp movl (%esp),%esi addl %ebp,%eax # 40_59 48 movl %ecx,%ebp xorl 8(%esp),%esi xorl %edx,%ebp xorl 32(%esp),%esi andl %ebx,%ebp xorl 52(%esp),%esi roll $1,%esi addl %edi,%ebp rorl $2,%ebx movl %eax,%edi roll $5,%edi movl %esi,(%esp) leal 2400959708(%esi,%ebp,1),%esi movl %ecx,%ebp addl %edi,%esi andl %edx,%ebp movl 4(%esp),%edi addl %ebp,%esi # 40_59 49 movl %ebx,%ebp xorl 12(%esp),%edi xorl %ecx,%ebp xorl 36(%esp),%edi andl %eax,%ebp xorl 56(%esp),%edi roll $1,%edi addl %edx,%ebp rorl $2,%eax movl %esi,%edx roll $5,%edx movl %edi,4(%esp) leal 2400959708(%edi,%ebp,1),%edi movl %ebx,%ebp addl %edx,%edi andl %ecx,%ebp movl 8(%esp),%edx addl %ebp,%edi # 40_59 50 movl 
%eax,%ebp xorl 16(%esp),%edx xorl %ebx,%ebp xorl 40(%esp),%edx andl %esi,%ebp xorl 60(%esp),%edx roll $1,%edx addl %ecx,%ebp rorl $2,%esi movl %edi,%ecx roll $5,%ecx movl %edx,8(%esp) leal 2400959708(%edx,%ebp,1),%edx movl %eax,%ebp addl %ecx,%edx andl %ebx,%ebp movl 12(%esp),%ecx addl %ebp,%edx # 40_59 51 movl %esi,%ebp xorl 20(%esp),%ecx xorl %eax,%ebp xorl 44(%esp),%ecx andl %edi,%ebp xorl (%esp),%ecx roll $1,%ecx addl %ebx,%ebp rorl $2,%edi movl %edx,%ebx roll $5,%ebx movl %ecx,12(%esp) leal 2400959708(%ecx,%ebp,1),%ecx movl %esi,%ebp addl %ebx,%ecx andl %eax,%ebp movl 16(%esp),%ebx addl %ebp,%ecx # 40_59 52 movl %edi,%ebp xorl 24(%esp),%ebx xorl %esi,%ebp xorl 48(%esp),%ebx andl %edx,%ebp xorl 4(%esp),%ebx roll $1,%ebx addl %eax,%ebp rorl $2,%edx movl %ecx,%eax roll $5,%eax movl %ebx,16(%esp) leal 2400959708(%ebx,%ebp,1),%ebx movl %edi,%ebp addl %eax,%ebx andl %esi,%ebp movl 20(%esp),%eax addl %ebp,%ebx # 40_59 53 movl %edx,%ebp xorl 28(%esp),%eax xorl %edi,%ebp xorl 52(%esp),%eax andl %ecx,%ebp xorl 8(%esp),%eax roll $1,%eax addl %esi,%ebp rorl $2,%ecx movl %ebx,%esi roll $5,%esi movl %eax,20(%esp) leal 2400959708(%eax,%ebp,1),%eax movl %edx,%ebp addl %esi,%eax andl %edi,%ebp movl 24(%esp),%esi addl %ebp,%eax # 40_59 54 movl %ecx,%ebp xorl 32(%esp),%esi xorl %edx,%ebp xorl 56(%esp),%esi andl %ebx,%ebp xorl 12(%esp),%esi roll $1,%esi addl %edi,%ebp rorl $2,%ebx movl %eax,%edi roll $5,%edi movl %esi,24(%esp) leal 2400959708(%esi,%ebp,1),%esi movl %ecx,%ebp addl %edi,%esi andl %edx,%ebp movl 28(%esp),%edi addl %ebp,%esi # 40_59 55 movl %ebx,%ebp xorl 36(%esp),%edi xorl %ecx,%ebp xorl 60(%esp),%edi andl %eax,%ebp xorl 16(%esp),%edi roll $1,%edi addl %edx,%ebp rorl $2,%eax movl %esi,%edx roll $5,%edx movl %edi,28(%esp) leal 2400959708(%edi,%ebp,1),%edi movl %ebx,%ebp addl %edx,%edi andl %ecx,%ebp movl 32(%esp),%edx addl %ebp,%edi # 40_59 56 movl %eax,%ebp xorl 40(%esp),%edx xorl %ebx,%ebp xorl (%esp),%edx andl %esi,%ebp xorl 20(%esp),%edx roll $1,%edx addl %ecx,%ebp rorl $2,%esi movl %edi,%ecx roll $5,%ecx movl %edx,32(%esp) leal 2400959708(%edx,%ebp,1),%edx movl %eax,%ebp addl %ecx,%edx andl %ebx,%ebp movl 36(%esp),%ecx addl %ebp,%edx # 40_59 57 movl %esi,%ebp xorl 44(%esp),%ecx xorl %eax,%ebp xorl 4(%esp),%ecx andl %edi,%ebp xorl 24(%esp),%ecx roll $1,%ecx addl %ebx,%ebp rorl $2,%edi movl %edx,%ebx roll $5,%ebx movl %ecx,36(%esp) leal 2400959708(%ecx,%ebp,1),%ecx movl %esi,%ebp addl %ebx,%ecx andl %eax,%ebp movl 40(%esp),%ebx addl %ebp,%ecx # 40_59 58 movl %edi,%ebp xorl 48(%esp),%ebx xorl %esi,%ebp xorl 8(%esp),%ebx andl %edx,%ebp xorl 28(%esp),%ebx roll $1,%ebx addl %eax,%ebp rorl $2,%edx movl %ecx,%eax roll $5,%eax movl %ebx,40(%esp) leal 2400959708(%ebx,%ebp,1),%ebx movl %edi,%ebp addl %eax,%ebx andl %esi,%ebp movl 44(%esp),%eax addl %ebp,%ebx # 40_59 59 movl %edx,%ebp xorl 52(%esp),%eax xorl %edi,%ebp xorl 12(%esp),%eax andl %ecx,%ebp xorl 32(%esp),%eax roll $1,%eax addl %esi,%ebp rorl $2,%ecx movl %ebx,%esi roll $5,%esi movl %eax,44(%esp) leal 2400959708(%eax,%ebp,1),%eax movl %edx,%ebp addl %esi,%eax andl %edi,%ebp movl 48(%esp),%esi addl %ebp,%eax # 20_39 60 movl %ebx,%ebp xorl 56(%esp),%esi xorl %ecx,%ebp xorl 16(%esp),%esi xorl %edx,%ebp xorl 36(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,48(%esp) leal 3395469782(%esi,%edi,1),%esi movl 52(%esp),%edi addl %ebp,%esi # 20_39 61 movl %eax,%ebp xorl 60(%esp),%edi xorl %ebx,%ebp xorl 20(%esp),%edi xorl %ecx,%ebp xorl 40(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp 
roll $5,%ebp movl %edi,52(%esp) leal 3395469782(%edi,%edx,1),%edi movl 56(%esp),%edx addl %ebp,%edi # 20_39 62 movl %esi,%ebp xorl (%esp),%edx xorl %eax,%ebp xorl 24(%esp),%edx xorl %ebx,%ebp xorl 44(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,56(%esp) leal 3395469782(%edx,%ecx,1),%edx movl 60(%esp),%ecx addl %ebp,%edx # 20_39 63 movl %edi,%ebp xorl 4(%esp),%ecx xorl %esi,%ebp xorl 28(%esp),%ecx xorl %eax,%ebp xorl 48(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,60(%esp) leal 3395469782(%ecx,%ebx,1),%ecx movl (%esp),%ebx addl %ebp,%ecx # 20_39 64 movl %edx,%ebp xorl 8(%esp),%ebx xorl %edi,%ebp xorl 32(%esp),%ebx xorl %esi,%ebp xorl 52(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,(%esp) leal 3395469782(%ebx,%eax,1),%ebx movl 4(%esp),%eax addl %ebp,%ebx # 20_39 65 movl %ecx,%ebp xorl 12(%esp),%eax xorl %edx,%ebp xorl 36(%esp),%eax xorl %edi,%ebp xorl 56(%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp movl %eax,4(%esp) leal 3395469782(%eax,%esi,1),%eax movl 8(%esp),%esi addl %ebp,%eax # 20_39 66 movl %ebx,%ebp xorl 16(%esp),%esi xorl %ecx,%ebp xorl 40(%esp),%esi xorl %edx,%ebp xorl 60(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,8(%esp) leal 3395469782(%esi,%edi,1),%esi movl 12(%esp),%edi addl %ebp,%esi # 20_39 67 movl %eax,%ebp xorl 20(%esp),%edi xorl %ebx,%ebp xorl 44(%esp),%edi xorl %ecx,%ebp xorl (%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp movl %edi,12(%esp) leal 3395469782(%edi,%edx,1),%edi movl 16(%esp),%edx addl %ebp,%edi # 20_39 68 movl %esi,%ebp xorl 24(%esp),%edx xorl %eax,%ebp xorl 48(%esp),%edx xorl %ebx,%ebp xorl 4(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,16(%esp) leal 3395469782(%edx,%ecx,1),%edx movl 20(%esp),%ecx addl %ebp,%edx # 20_39 69 movl %edi,%ebp xorl 28(%esp),%ecx xorl %esi,%ebp xorl 52(%esp),%ecx xorl %eax,%ebp xorl 8(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,20(%esp) leal 3395469782(%ecx,%ebx,1),%ecx movl 24(%esp),%ebx addl %ebp,%ecx # 20_39 70 movl %edx,%ebp xorl 32(%esp),%ebx xorl %edi,%ebp xorl 56(%esp),%ebx xorl %esi,%ebp xorl 12(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,24(%esp) leal 3395469782(%ebx,%eax,1),%ebx movl 28(%esp),%eax addl %ebp,%ebx # 20_39 71 movl %ecx,%ebp xorl 36(%esp),%eax xorl %edx,%ebp xorl 60(%esp),%eax xorl %edi,%ebp xorl 16(%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp movl %eax,28(%esp) leal 3395469782(%eax,%esi,1),%eax movl 32(%esp),%esi addl %ebp,%eax # 20_39 72 movl %ebx,%ebp xorl 40(%esp),%esi xorl %ecx,%ebp xorl (%esp),%esi xorl %edx,%ebp xorl 20(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp movl %esi,32(%esp) leal 3395469782(%esi,%edi,1),%esi movl 36(%esp),%edi addl %ebp,%esi # 20_39 73 movl %eax,%ebp xorl 44(%esp),%edi xorl %ebx,%ebp xorl 4(%esp),%edi xorl %ecx,%ebp xorl 24(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp movl %edi,36(%esp) leal 3395469782(%edi,%edx,1),%edi movl 40(%esp),%edx addl %ebp,%edi # 20_39 74 movl %esi,%ebp xorl 48(%esp),%edx xorl %eax,%ebp xorl 8(%esp),%edx xorl %ebx,%ebp xorl 28(%esp),%edx roll $1,%edx addl %ebp,%ecx rorl $2,%esi movl %edi,%ebp roll $5,%ebp movl %edx,40(%esp) leal 3395469782(%edx,%ecx,1),%edx movl 
44(%esp),%ecx addl %ebp,%edx # 20_39 75 movl %edi,%ebp xorl 52(%esp),%ecx xorl %esi,%ebp xorl 12(%esp),%ecx xorl %eax,%ebp xorl 32(%esp),%ecx roll $1,%ecx addl %ebp,%ebx rorl $2,%edi movl %edx,%ebp roll $5,%ebp movl %ecx,44(%esp) leal 3395469782(%ecx,%ebx,1),%ecx movl 48(%esp),%ebx addl %ebp,%ecx # 20_39 76 movl %edx,%ebp xorl 56(%esp),%ebx xorl %edi,%ebp xorl 16(%esp),%ebx xorl %esi,%ebp xorl 36(%esp),%ebx roll $1,%ebx addl %ebp,%eax rorl $2,%edx movl %ecx,%ebp roll $5,%ebp movl %ebx,48(%esp) leal 3395469782(%ebx,%eax,1),%ebx movl 52(%esp),%eax addl %ebp,%ebx # 20_39 77 movl %ecx,%ebp xorl 60(%esp),%eax xorl %edx,%ebp xorl 20(%esp),%eax xorl %edi,%ebp xorl 40(%esp),%eax roll $1,%eax addl %ebp,%esi rorl $2,%ecx movl %ebx,%ebp roll $5,%ebp leal 3395469782(%eax,%esi,1),%eax movl 56(%esp),%esi addl %ebp,%eax # 20_39 78 movl %ebx,%ebp xorl (%esp),%esi xorl %ecx,%ebp xorl 24(%esp),%esi xorl %edx,%ebp xorl 44(%esp),%esi roll $1,%esi addl %ebp,%edi rorl $2,%ebx movl %eax,%ebp roll $5,%ebp leal 3395469782(%esi,%edi,1),%esi movl 60(%esp),%edi addl %ebp,%esi # 20_39 79 movl %eax,%ebp xorl 4(%esp),%edi xorl %ebx,%ebp xorl 28(%esp),%edi xorl %ecx,%ebp xorl 48(%esp),%edi roll $1,%edi addl %ebp,%edx rorl $2,%eax movl %esi,%ebp roll $5,%ebp leal 3395469782(%edi,%edx,1),%edi addl %ebp,%edi movl 96(%esp),%ebp movl 100(%esp),%edx addl (%ebp),%edi addl 4(%ebp),%esi addl 8(%ebp),%eax addl 12(%ebp),%ebx addl 16(%ebp),%ecx movl %edi,(%ebp) addl $64,%edx movl %esi,4(%ebp) cmpl 104(%esp),%edx movl %eax,8(%ebp) movl %ecx,%edi movl %ebx,12(%ebp) movl %edx,%esi movl %ecx,16(%ebp) jb L000loop addl $76,%esp popl %edi popl %esi popl %ebx popl %ebp ret .globl _sha1_block_data_order_ssse3 .private_extern _sha1_block_data_order_ssse3 .align 4 _sha1_block_data_order_ssse3: L_sha1_block_data_order_ssse3_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi call L001pic_point L001pic_point: popl %ebp leal LK_XX_XX-L001pic_point(%ebp),%ebp movdqa (%ebp),%xmm7 movdqa 16(%ebp),%xmm0 movdqa 32(%ebp),%xmm1 movdqa 48(%ebp),%xmm2 movdqa 64(%ebp),%xmm6 movl 20(%esp),%edi movl 24(%esp),%ebp movl 28(%esp),%edx movl %esp,%esi subl $208,%esp andl $-64,%esp movdqa %xmm0,112(%esp) movdqa %xmm1,128(%esp) movdqa %xmm2,144(%esp) shll $6,%edx movdqa %xmm7,160(%esp) addl %ebp,%edx movdqa %xmm6,176(%esp) addl $64,%ebp movl %edi,192(%esp) movl %ebp,196(%esp) movl %edx,200(%esp) movl %esi,204(%esp) movl (%edi),%eax movl 4(%edi),%ebx movl 8(%edi),%ecx movl 12(%edi),%edx movl 16(%edi),%edi movl %ebx,%esi movdqu -64(%ebp),%xmm0 movdqu -48(%ebp),%xmm1 movdqu -32(%ebp),%xmm2 movdqu -16(%ebp),%xmm3 .byte 102,15,56,0,198 .byte 102,15,56,0,206 .byte 102,15,56,0,214 movdqa %xmm7,96(%esp) .byte 102,15,56,0,222 paddd %xmm7,%xmm0 paddd %xmm7,%xmm1 paddd %xmm7,%xmm2 movdqa %xmm0,(%esp) psubd %xmm7,%xmm0 movdqa %xmm1,16(%esp) psubd %xmm7,%xmm1 movdqa %xmm2,32(%esp) movl %ecx,%ebp psubd %xmm7,%xmm2 xorl %edx,%ebp pshufd $238,%xmm0,%xmm4 andl %ebp,%esi jmp L002loop .align 4,0x90 L002loop: rorl $2,%ebx xorl %edx,%esi movl %eax,%ebp punpcklqdq %xmm1,%xmm4 movdqa %xmm3,%xmm6 addl (%esp),%edi xorl %ecx,%ebx paddd %xmm3,%xmm7 movdqa %xmm0,64(%esp) roll $5,%eax addl %esi,%edi psrldq $4,%xmm6 andl %ebx,%ebp xorl %ecx,%ebx pxor %xmm0,%xmm4 addl %eax,%edi rorl $7,%eax pxor %xmm2,%xmm6 xorl %ecx,%ebp movl %edi,%esi addl 4(%esp),%edx pxor %xmm6,%xmm4 xorl %ebx,%eax roll $5,%edi movdqa %xmm7,48(%esp) addl %ebp,%edx andl %eax,%esi movdqa %xmm4,%xmm0 xorl %ebx,%eax addl %edi,%edx rorl $7,%edi movdqa %xmm4,%xmm6 xorl %ebx,%esi pslldq $12,%xmm0 paddd %xmm4,%xmm4 movl %edx,%ebp 
addl 8(%esp),%ecx psrld $31,%xmm6 xorl %eax,%edi roll $5,%edx movdqa %xmm0,%xmm7 addl %esi,%ecx andl %edi,%ebp xorl %eax,%edi psrld $30,%xmm0 addl %edx,%ecx rorl $7,%edx por %xmm6,%xmm4 xorl %eax,%ebp movl %ecx,%esi addl 12(%esp),%ebx pslld $2,%xmm7 xorl %edi,%edx roll $5,%ecx pxor %xmm0,%xmm4 movdqa 96(%esp),%xmm0 addl %ebp,%ebx andl %edx,%esi pxor %xmm7,%xmm4 pshufd $238,%xmm1,%xmm5 xorl %edi,%edx addl %ecx,%ebx rorl $7,%ecx xorl %edi,%esi movl %ebx,%ebp punpcklqdq %xmm2,%xmm5 movdqa %xmm4,%xmm7 addl 16(%esp),%eax xorl %edx,%ecx paddd %xmm4,%xmm0 movdqa %xmm1,80(%esp) roll $5,%ebx addl %esi,%eax psrldq $4,%xmm7 andl %ecx,%ebp xorl %edx,%ecx pxor %xmm1,%xmm5 addl %ebx,%eax rorl $7,%ebx pxor %xmm3,%xmm7 xorl %edx,%ebp movl %eax,%esi addl 20(%esp),%edi pxor %xmm7,%xmm5 xorl %ecx,%ebx roll $5,%eax movdqa %xmm0,(%esp) addl %ebp,%edi andl %ebx,%esi movdqa %xmm5,%xmm1 xorl %ecx,%ebx addl %eax,%edi rorl $7,%eax movdqa %xmm5,%xmm7 xorl %ecx,%esi pslldq $12,%xmm1 paddd %xmm5,%xmm5 movl %edi,%ebp addl 24(%esp),%edx psrld $31,%xmm7 xorl %ebx,%eax roll $5,%edi movdqa %xmm1,%xmm0 addl %esi,%edx andl %eax,%ebp xorl %ebx,%eax psrld $30,%xmm1 addl %edi,%edx rorl $7,%edi por %xmm7,%xmm5 xorl %ebx,%ebp movl %edx,%esi addl 28(%esp),%ecx pslld $2,%xmm0 xorl %eax,%edi roll $5,%edx pxor %xmm1,%xmm5 movdqa 112(%esp),%xmm1 addl %ebp,%ecx andl %edi,%esi pxor %xmm0,%xmm5 pshufd $238,%xmm2,%xmm6 xorl %eax,%edi addl %edx,%ecx rorl $7,%edx xorl %eax,%esi movl %ecx,%ebp punpcklqdq %xmm3,%xmm6 movdqa %xmm5,%xmm0 addl 32(%esp),%ebx xorl %edi,%edx paddd %xmm5,%xmm1 movdqa %xmm2,96(%esp) roll $5,%ecx addl %esi,%ebx psrldq $4,%xmm0 andl %edx,%ebp xorl %edi,%edx pxor %xmm2,%xmm6 addl %ecx,%ebx rorl $7,%ecx pxor %xmm4,%xmm0 xorl %edi,%ebp movl %ebx,%esi addl 36(%esp),%eax pxor %xmm0,%xmm6 xorl %edx,%ecx roll $5,%ebx movdqa %xmm1,16(%esp) addl %ebp,%eax andl %ecx,%esi movdqa %xmm6,%xmm2 xorl %edx,%ecx addl %ebx,%eax rorl $7,%ebx movdqa %xmm6,%xmm0 xorl %edx,%esi pslldq $12,%xmm2 paddd %xmm6,%xmm6 movl %eax,%ebp addl 40(%esp),%edi psrld $31,%xmm0 xorl %ecx,%ebx roll $5,%eax movdqa %xmm2,%xmm1 addl %esi,%edi andl %ebx,%ebp xorl %ecx,%ebx psrld $30,%xmm2 addl %eax,%edi rorl $7,%eax por %xmm0,%xmm6 xorl %ecx,%ebp movdqa 64(%esp),%xmm0 movl %edi,%esi addl 44(%esp),%edx pslld $2,%xmm1 xorl %ebx,%eax roll $5,%edi pxor %xmm2,%xmm6 movdqa 112(%esp),%xmm2 addl %ebp,%edx andl %eax,%esi pxor %xmm1,%xmm6 pshufd $238,%xmm3,%xmm7 xorl %ebx,%eax addl %edi,%edx rorl $7,%edi xorl %ebx,%esi movl %edx,%ebp punpcklqdq %xmm4,%xmm7 movdqa %xmm6,%xmm1 addl 48(%esp),%ecx xorl %eax,%edi paddd %xmm6,%xmm2 movdqa %xmm3,64(%esp) roll $5,%edx addl %esi,%ecx psrldq $4,%xmm1 andl %edi,%ebp xorl %eax,%edi pxor %xmm3,%xmm7 addl %edx,%ecx rorl $7,%edx pxor %xmm5,%xmm1 xorl %eax,%ebp movl %ecx,%esi addl 52(%esp),%ebx pxor %xmm1,%xmm7 xorl %edi,%edx roll $5,%ecx movdqa %xmm2,32(%esp) addl %ebp,%ebx andl %edx,%esi movdqa %xmm7,%xmm3 xorl %edi,%edx addl %ecx,%ebx rorl $7,%ecx movdqa %xmm7,%xmm1 xorl %edi,%esi pslldq $12,%xmm3 paddd %xmm7,%xmm7 movl %ebx,%ebp addl 56(%esp),%eax psrld $31,%xmm1 xorl %edx,%ecx roll $5,%ebx movdqa %xmm3,%xmm2 addl %esi,%eax andl %ecx,%ebp xorl %edx,%ecx psrld $30,%xmm3 addl %ebx,%eax rorl $7,%ebx por %xmm1,%xmm7 xorl %edx,%ebp movdqa 80(%esp),%xmm1 movl %eax,%esi addl 60(%esp),%edi pslld $2,%xmm2 xorl %ecx,%ebx roll $5,%eax pxor %xmm3,%xmm7 movdqa 112(%esp),%xmm3 addl %ebp,%edi andl %ebx,%esi pxor %xmm2,%xmm7 pshufd $238,%xmm6,%xmm2 xorl %ecx,%ebx addl %eax,%edi rorl $7,%eax pxor %xmm4,%xmm0 punpcklqdq %xmm7,%xmm2 xorl %ecx,%esi movl 
%edi,%ebp addl (%esp),%edx pxor %xmm1,%xmm0 movdqa %xmm4,80(%esp) xorl %ebx,%eax roll $5,%edi movdqa %xmm3,%xmm4 addl %esi,%edx paddd %xmm7,%xmm3 andl %eax,%ebp pxor %xmm2,%xmm0 xorl %ebx,%eax addl %edi,%edx rorl $7,%edi xorl %ebx,%ebp movdqa %xmm0,%xmm2 movdqa %xmm3,48(%esp) movl %edx,%esi addl 4(%esp),%ecx xorl %eax,%edi roll $5,%edx pslld $2,%xmm0 addl %ebp,%ecx andl %edi,%esi psrld $30,%xmm2 xorl %eax,%edi addl %edx,%ecx rorl $7,%edx xorl %eax,%esi movl %ecx,%ebp addl 8(%esp),%ebx xorl %edi,%edx roll $5,%ecx por %xmm2,%xmm0 addl %esi,%ebx andl %edx,%ebp movdqa 96(%esp),%xmm2 xorl %edi,%edx addl %ecx,%ebx addl 12(%esp),%eax xorl %edi,%ebp movl %ebx,%esi pshufd $238,%xmm7,%xmm3 roll $5,%ebx addl %ebp,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 16(%esp),%edi pxor %xmm5,%xmm1 punpcklqdq %xmm0,%xmm3 xorl %ecx,%esi movl %eax,%ebp roll $5,%eax pxor %xmm2,%xmm1 movdqa %xmm5,96(%esp) addl %esi,%edi xorl %ecx,%ebp movdqa %xmm4,%xmm5 rorl $7,%ebx paddd %xmm0,%xmm4 addl %eax,%edi pxor %xmm3,%xmm1 addl 20(%esp),%edx xorl %ebx,%ebp movl %edi,%esi roll $5,%edi movdqa %xmm1,%xmm3 movdqa %xmm4,(%esp) addl %ebp,%edx xorl %ebx,%esi rorl $7,%eax addl %edi,%edx pslld $2,%xmm1 addl 24(%esp),%ecx xorl %eax,%esi psrld $30,%xmm3 movl %edx,%ebp roll $5,%edx addl %esi,%ecx xorl %eax,%ebp rorl $7,%edi addl %edx,%ecx por %xmm3,%xmm1 addl 28(%esp),%ebx xorl %edi,%ebp movdqa 64(%esp),%xmm3 movl %ecx,%esi roll $5,%ecx addl %ebp,%ebx xorl %edi,%esi rorl $7,%edx pshufd $238,%xmm0,%xmm4 addl %ecx,%ebx addl 32(%esp),%eax pxor %xmm6,%xmm2 punpcklqdq %xmm1,%xmm4 xorl %edx,%esi movl %ebx,%ebp roll $5,%ebx pxor %xmm3,%xmm2 movdqa %xmm6,64(%esp) addl %esi,%eax xorl %edx,%ebp movdqa 128(%esp),%xmm6 rorl $7,%ecx paddd %xmm1,%xmm5 addl %ebx,%eax pxor %xmm4,%xmm2 addl 36(%esp),%edi xorl %ecx,%ebp movl %eax,%esi roll $5,%eax movdqa %xmm2,%xmm4 movdqa %xmm5,16(%esp) addl %ebp,%edi xorl %ecx,%esi rorl $7,%ebx addl %eax,%edi pslld $2,%xmm2 addl 40(%esp),%edx xorl %ebx,%esi psrld $30,%xmm4 movl %edi,%ebp roll $5,%edi addl %esi,%edx xorl %ebx,%ebp rorl $7,%eax addl %edi,%edx por %xmm4,%xmm2 addl 44(%esp),%ecx xorl %eax,%ebp movdqa 80(%esp),%xmm4 movl %edx,%esi roll $5,%edx addl %ebp,%ecx xorl %eax,%esi rorl $7,%edi pshufd $238,%xmm1,%xmm5 addl %edx,%ecx addl 48(%esp),%ebx pxor %xmm7,%xmm3 punpcklqdq %xmm2,%xmm5 xorl %edi,%esi movl %ecx,%ebp roll $5,%ecx pxor %xmm4,%xmm3 movdqa %xmm7,80(%esp) addl %esi,%ebx xorl %edi,%ebp movdqa %xmm6,%xmm7 rorl $7,%edx paddd %xmm2,%xmm6 addl %ecx,%ebx pxor %xmm5,%xmm3 addl 52(%esp),%eax xorl %edx,%ebp movl %ebx,%esi roll $5,%ebx movdqa %xmm3,%xmm5 movdqa %xmm6,32(%esp) addl %ebp,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax pslld $2,%xmm3 addl 56(%esp),%edi xorl %ecx,%esi psrld $30,%xmm5 movl %eax,%ebp roll $5,%eax addl %esi,%edi xorl %ecx,%ebp rorl $7,%ebx addl %eax,%edi por %xmm5,%xmm3 addl 60(%esp),%edx xorl %ebx,%ebp movdqa 96(%esp),%xmm5 movl %edi,%esi roll $5,%edi addl %ebp,%edx xorl %ebx,%esi rorl $7,%eax pshufd $238,%xmm2,%xmm6 addl %edi,%edx addl (%esp),%ecx pxor %xmm0,%xmm4 punpcklqdq %xmm3,%xmm6 xorl %eax,%esi movl %edx,%ebp roll $5,%edx pxor %xmm5,%xmm4 movdqa %xmm0,96(%esp) addl %esi,%ecx xorl %eax,%ebp movdqa %xmm7,%xmm0 rorl $7,%edi paddd %xmm3,%xmm7 addl %edx,%ecx pxor %xmm6,%xmm4 addl 4(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi roll $5,%ecx movdqa %xmm4,%xmm6 movdqa %xmm7,48(%esp) addl %ebp,%ebx xorl %edi,%esi rorl $7,%edx addl %ecx,%ebx pslld $2,%xmm4 addl 8(%esp),%eax xorl %edx,%esi psrld $30,%xmm6 movl %ebx,%ebp roll $5,%ebx addl %esi,%eax xorl %edx,%ebp rorl $7,%ecx addl 
%ebx,%eax por %xmm6,%xmm4 addl 12(%esp),%edi xorl %ecx,%ebp movdqa 64(%esp),%xmm6 movl %eax,%esi roll $5,%eax addl %ebp,%edi xorl %ecx,%esi rorl $7,%ebx pshufd $238,%xmm3,%xmm7 addl %eax,%edi addl 16(%esp),%edx pxor %xmm1,%xmm5 punpcklqdq %xmm4,%xmm7 xorl %ebx,%esi movl %edi,%ebp roll $5,%edi pxor %xmm6,%xmm5 movdqa %xmm1,64(%esp) addl %esi,%edx xorl %ebx,%ebp movdqa %xmm0,%xmm1 rorl $7,%eax paddd %xmm4,%xmm0 addl %edi,%edx pxor %xmm7,%xmm5 addl 20(%esp),%ecx xorl %eax,%ebp movl %edx,%esi roll $5,%edx movdqa %xmm5,%xmm7 movdqa %xmm0,(%esp) addl %ebp,%ecx xorl %eax,%esi rorl $7,%edi addl %edx,%ecx pslld $2,%xmm5 addl 24(%esp),%ebx xorl %edi,%esi psrld $30,%xmm7 movl %ecx,%ebp roll $5,%ecx addl %esi,%ebx xorl %edi,%ebp rorl $7,%edx addl %ecx,%ebx por %xmm7,%xmm5 addl 28(%esp),%eax movdqa 80(%esp),%xmm7 rorl $7,%ecx movl %ebx,%esi xorl %edx,%ebp roll $5,%ebx pshufd $238,%xmm4,%xmm0 addl %ebp,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 32(%esp),%edi pxor %xmm2,%xmm6 punpcklqdq %xmm5,%xmm0 andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx pxor %xmm7,%xmm6 movdqa %xmm2,80(%esp) movl %eax,%ebp xorl %ecx,%esi roll $5,%eax movdqa %xmm1,%xmm2 addl %esi,%edi paddd %xmm5,%xmm1 xorl %ebx,%ebp pxor %xmm0,%xmm6 xorl %ecx,%ebx addl %eax,%edi addl 36(%esp),%edx andl %ebx,%ebp movdqa %xmm6,%xmm0 movdqa %xmm1,16(%esp) xorl %ecx,%ebx rorl $7,%eax movl %edi,%esi xorl %ebx,%ebp roll $5,%edi pslld $2,%xmm6 addl %ebp,%edx xorl %eax,%esi psrld $30,%xmm0 xorl %ebx,%eax addl %edi,%edx addl 40(%esp),%ecx andl %eax,%esi xorl %ebx,%eax rorl $7,%edi por %xmm0,%xmm6 movl %edx,%ebp xorl %eax,%esi movdqa 96(%esp),%xmm0 roll $5,%edx addl %esi,%ecx xorl %edi,%ebp xorl %eax,%edi addl %edx,%ecx pshufd $238,%xmm5,%xmm1 addl 44(%esp),%ebx andl %edi,%ebp xorl %eax,%edi rorl $7,%edx movl %ecx,%esi xorl %edi,%ebp roll $5,%ecx addl %ebp,%ebx xorl %edx,%esi xorl %edi,%edx addl %ecx,%ebx addl 48(%esp),%eax pxor %xmm3,%xmm7 punpcklqdq %xmm6,%xmm1 andl %edx,%esi xorl %edi,%edx rorl $7,%ecx pxor %xmm0,%xmm7 movdqa %xmm3,96(%esp) movl %ebx,%ebp xorl %edx,%esi roll $5,%ebx movdqa 144(%esp),%xmm3 addl %esi,%eax paddd %xmm6,%xmm2 xorl %ecx,%ebp pxor %xmm1,%xmm7 xorl %edx,%ecx addl %ebx,%eax addl 52(%esp),%edi andl %ecx,%ebp movdqa %xmm7,%xmm1 movdqa %xmm2,32(%esp) xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%ebp roll $5,%eax pslld $2,%xmm7 addl %ebp,%edi xorl %ebx,%esi psrld $30,%xmm1 xorl %ecx,%ebx addl %eax,%edi addl 56(%esp),%edx andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax por %xmm1,%xmm7 movl %edi,%ebp xorl %ebx,%esi movdqa 64(%esp),%xmm1 roll $5,%edi addl %esi,%edx xorl %eax,%ebp xorl %ebx,%eax addl %edi,%edx pshufd $238,%xmm6,%xmm2 addl 60(%esp),%ecx andl %eax,%ebp xorl %ebx,%eax rorl $7,%edi movl %edx,%esi xorl %eax,%ebp roll $5,%edx addl %ebp,%ecx xorl %edi,%esi xorl %eax,%edi addl %edx,%ecx addl (%esp),%ebx pxor %xmm4,%xmm0 punpcklqdq %xmm7,%xmm2 andl %edi,%esi xorl %eax,%edi rorl $7,%edx pxor %xmm1,%xmm0 movdqa %xmm4,64(%esp) movl %ecx,%ebp xorl %edi,%esi roll $5,%ecx movdqa %xmm3,%xmm4 addl %esi,%ebx paddd %xmm7,%xmm3 xorl %edx,%ebp pxor %xmm2,%xmm0 xorl %edi,%edx addl %ecx,%ebx addl 4(%esp),%eax andl %edx,%ebp movdqa %xmm0,%xmm2 movdqa %xmm3,48(%esp) xorl %edi,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%ebp roll $5,%ebx pslld $2,%xmm0 addl %ebp,%eax xorl %ecx,%esi psrld $30,%xmm2 xorl %edx,%ecx addl %ebx,%eax addl 8(%esp),%edi andl %ecx,%esi xorl %edx,%ecx rorl $7,%ebx por %xmm2,%xmm0 movl %eax,%ebp xorl %ecx,%esi movdqa 80(%esp),%xmm2 roll $5,%eax addl %esi,%edi xorl %ebx,%ebp xorl %ecx,%ebx addl %eax,%edi pshufd 
$238,%xmm7,%xmm3 addl 12(%esp),%edx andl %ebx,%ebp xorl %ecx,%ebx rorl $7,%eax movl %edi,%esi xorl %ebx,%ebp roll $5,%edi addl %ebp,%edx xorl %eax,%esi xorl %ebx,%eax addl %edi,%edx addl 16(%esp),%ecx pxor %xmm5,%xmm1 punpcklqdq %xmm0,%xmm3 andl %eax,%esi xorl %ebx,%eax rorl $7,%edi pxor %xmm2,%xmm1 movdqa %xmm5,80(%esp) movl %edx,%ebp xorl %eax,%esi roll $5,%edx movdqa %xmm4,%xmm5 addl %esi,%ecx paddd %xmm0,%xmm4 xorl %edi,%ebp pxor %xmm3,%xmm1 xorl %eax,%edi addl %edx,%ecx addl 20(%esp),%ebx andl %edi,%ebp movdqa %xmm1,%xmm3 movdqa %xmm4,(%esp) xorl %eax,%edi rorl $7,%edx movl %ecx,%esi xorl %edi,%ebp roll $5,%ecx pslld $2,%xmm1 addl %ebp,%ebx xorl %edx,%esi psrld $30,%xmm3 xorl %edi,%edx addl %ecx,%ebx addl 24(%esp),%eax andl %edx,%esi xorl %edi,%edx rorl $7,%ecx por %xmm3,%xmm1 movl %ebx,%ebp xorl %edx,%esi movdqa 96(%esp),%xmm3 roll $5,%ebx addl %esi,%eax xorl %ecx,%ebp xorl %edx,%ecx addl %ebx,%eax pshufd $238,%xmm0,%xmm4 addl 28(%esp),%edi andl %ecx,%ebp xorl %edx,%ecx rorl $7,%ebx movl %eax,%esi xorl %ecx,%ebp roll $5,%eax addl %ebp,%edi xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%edi addl 32(%esp),%edx pxor %xmm6,%xmm2 punpcklqdq %xmm1,%xmm4 andl %ebx,%esi xorl %ecx,%ebx rorl $7,%eax pxor %xmm3,%xmm2 movdqa %xmm6,96(%esp) movl %edi,%ebp xorl %ebx,%esi roll $5,%edi movdqa %xmm5,%xmm6 addl %esi,%edx paddd %xmm1,%xmm5 xorl %eax,%ebp pxor %xmm4,%xmm2 xorl %ebx,%eax addl %edi,%edx addl 36(%esp),%ecx andl %eax,%ebp movdqa %xmm2,%xmm4 movdqa %xmm5,16(%esp) xorl %ebx,%eax rorl $7,%edi movl %edx,%esi xorl %eax,%ebp roll $5,%edx pslld $2,%xmm2 addl %ebp,%ecx xorl %edi,%esi psrld $30,%xmm4 xorl %eax,%edi addl %edx,%ecx addl 40(%esp),%ebx andl %edi,%esi xorl %eax,%edi rorl $7,%edx por %xmm4,%xmm2 movl %ecx,%ebp xorl %edi,%esi movdqa 64(%esp),%xmm4 roll $5,%ecx addl %esi,%ebx xorl %edx,%ebp xorl %edi,%edx addl %ecx,%ebx pshufd $238,%xmm1,%xmm5 addl 44(%esp),%eax andl %edx,%ebp xorl %edi,%edx rorl $7,%ecx movl %ebx,%esi xorl %edx,%ebp roll $5,%ebx addl %ebp,%eax xorl %edx,%esi addl %ebx,%eax addl 48(%esp),%edi pxor %xmm7,%xmm3 punpcklqdq %xmm2,%xmm5 xorl %ecx,%esi movl %eax,%ebp roll $5,%eax pxor %xmm4,%xmm3 movdqa %xmm7,64(%esp) addl %esi,%edi xorl %ecx,%ebp movdqa %xmm6,%xmm7 rorl $7,%ebx paddd %xmm2,%xmm6 addl %eax,%edi pxor %xmm5,%xmm3 addl 52(%esp),%edx xorl %ebx,%ebp movl %edi,%esi roll $5,%edi movdqa %xmm3,%xmm5 movdqa %xmm6,32(%esp) addl %ebp,%edx xorl %ebx,%esi rorl $7,%eax addl %edi,%edx pslld $2,%xmm3 addl 56(%esp),%ecx xorl %eax,%esi psrld $30,%xmm5 movl %edx,%ebp roll $5,%edx addl %esi,%ecx xorl %eax,%ebp rorl $7,%edi addl %edx,%ecx por %xmm5,%xmm3 addl 60(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi roll $5,%ecx addl %ebp,%ebx xorl %edi,%esi rorl $7,%edx addl %ecx,%ebx addl (%esp),%eax xorl %edx,%esi movl %ebx,%ebp roll $5,%ebx addl %esi,%eax xorl %edx,%ebp rorl $7,%ecx paddd %xmm3,%xmm7 addl %ebx,%eax addl 4(%esp),%edi xorl %ecx,%ebp movl %eax,%esi movdqa %xmm7,48(%esp) roll $5,%eax addl %ebp,%edi xorl %ecx,%esi rorl $7,%ebx addl %eax,%edi addl 8(%esp),%edx xorl %ebx,%esi movl %edi,%ebp roll $5,%edi addl %esi,%edx xorl %ebx,%ebp rorl $7,%eax addl %edi,%edx addl 12(%esp),%ecx xorl %eax,%ebp movl %edx,%esi roll $5,%edx addl %ebp,%ecx xorl %eax,%esi rorl $7,%edi addl %edx,%ecx movl 196(%esp),%ebp cmpl 200(%esp),%ebp je L003done movdqa 160(%esp),%xmm7 movdqa 176(%esp),%xmm6 movdqu (%ebp),%xmm0 movdqu 16(%ebp),%xmm1 movdqu 32(%ebp),%xmm2 movdqu 48(%ebp),%xmm3 addl $64,%ebp .byte 102,15,56,0,198 movl %ebp,196(%esp) movdqa %xmm7,96(%esp) addl 16(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp roll 
$5,%ecx addl %esi,%ebx xorl %edi,%ebp rorl $7,%edx .byte 102,15,56,0,206 addl %ecx,%ebx addl 20(%esp),%eax xorl %edx,%ebp movl %ebx,%esi paddd %xmm7,%xmm0 roll $5,%ebx addl %ebp,%eax xorl %edx,%esi rorl $7,%ecx movdqa %xmm0,(%esp) addl %ebx,%eax addl 24(%esp),%edi xorl %ecx,%esi movl %eax,%ebp psubd %xmm7,%xmm0 roll $5,%eax addl %esi,%edi xorl %ecx,%ebp rorl $7,%ebx addl %eax,%edi addl 28(%esp),%edx xorl %ebx,%ebp movl %edi,%esi roll $5,%edi addl %ebp,%edx xorl %ebx,%esi rorl $7,%eax addl %edi,%edx addl 32(%esp),%ecx xorl %eax,%esi movl %edx,%ebp roll $5,%edx addl %esi,%ecx xorl %eax,%ebp rorl $7,%edi .byte 102,15,56,0,214 addl %edx,%ecx addl 36(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi paddd %xmm7,%xmm1 roll $5,%ecx addl %ebp,%ebx xorl %edi,%esi rorl $7,%edx movdqa %xmm1,16(%esp) addl %ecx,%ebx addl 40(%esp),%eax xorl %edx,%esi movl %ebx,%ebp psubd %xmm7,%xmm1 roll $5,%ebx addl %esi,%eax xorl %edx,%ebp rorl $7,%ecx addl %ebx,%eax addl 44(%esp),%edi xorl %ecx,%ebp movl %eax,%esi roll $5,%eax addl %ebp,%edi xorl %ecx,%esi rorl $7,%ebx addl %eax,%edi addl 48(%esp),%edx xorl %ebx,%esi movl %edi,%ebp roll $5,%edi addl %esi,%edx xorl %ebx,%ebp rorl $7,%eax .byte 102,15,56,0,222 addl %edi,%edx addl 52(%esp),%ecx xorl %eax,%ebp movl %edx,%esi paddd %xmm7,%xmm2 roll $5,%edx addl %ebp,%ecx xorl %eax,%esi rorl $7,%edi movdqa %xmm2,32(%esp) addl %edx,%ecx addl 56(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp psubd %xmm7,%xmm2 roll $5,%ecx addl %esi,%ebx xorl %edi,%ebp rorl $7,%edx addl %ecx,%ebx addl 60(%esp),%eax xorl %edx,%ebp movl %ebx,%esi roll $5,%ebx addl %ebp,%eax rorl $7,%ecx addl %ebx,%eax movl 192(%esp),%ebp addl (%ebp),%eax addl 4(%ebp),%esi addl 8(%ebp),%ecx movl %eax,(%ebp) addl 12(%ebp),%edx movl %esi,4(%ebp) addl 16(%ebp),%edi movl %ecx,8(%ebp) movl %ecx,%ebx movl %edx,12(%ebp) xorl %edx,%ebx movl %edi,16(%ebp) movl %esi,%ebp pshufd $238,%xmm0,%xmm4 andl %ebx,%esi movl %ebp,%ebx jmp L002loop .align 4,0x90 L003done: addl 16(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp roll $5,%ecx addl %esi,%ebx xorl %edi,%ebp rorl $7,%edx addl %ecx,%ebx addl 20(%esp),%eax xorl %edx,%ebp movl %ebx,%esi roll $5,%ebx addl %ebp,%eax xorl %edx,%esi rorl $7,%ecx addl %ebx,%eax addl 24(%esp),%edi xorl %ecx,%esi movl %eax,%ebp roll $5,%eax addl %esi,%edi xorl %ecx,%ebp rorl $7,%ebx addl %eax,%edi addl 28(%esp),%edx xorl %ebx,%ebp movl %edi,%esi roll $5,%edi addl %ebp,%edx xorl %ebx,%esi rorl $7,%eax addl %edi,%edx addl 32(%esp),%ecx xorl %eax,%esi movl %edx,%ebp roll $5,%edx addl %esi,%ecx xorl %eax,%ebp rorl $7,%edi addl %edx,%ecx addl 36(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi roll $5,%ecx addl %ebp,%ebx xorl %edi,%esi rorl $7,%edx addl %ecx,%ebx addl 40(%esp),%eax xorl %edx,%esi movl %ebx,%ebp roll $5,%ebx addl %esi,%eax xorl %edx,%ebp rorl $7,%ecx addl %ebx,%eax addl 44(%esp),%edi xorl %ecx,%ebp movl %eax,%esi roll $5,%eax addl %ebp,%edi xorl %ecx,%esi rorl $7,%ebx addl %eax,%edi addl 48(%esp),%edx xorl %ebx,%esi movl %edi,%ebp roll $5,%edi addl %esi,%edx xorl %ebx,%ebp rorl $7,%eax addl %edi,%edx addl 52(%esp),%ecx xorl %eax,%ebp movl %edx,%esi roll $5,%edx addl %ebp,%ecx xorl %eax,%esi rorl $7,%edi addl %edx,%ecx addl 56(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp roll $5,%ecx addl %esi,%ebx xorl %edi,%ebp rorl $7,%edx addl %ecx,%ebx addl 60(%esp),%eax xorl %edx,%ebp movl %ebx,%esi roll $5,%ebx addl %ebp,%eax rorl $7,%ecx addl %ebx,%eax movl 192(%esp),%ebp addl (%ebp),%eax movl 204(%esp),%esp addl 4(%ebp),%esi addl 8(%ebp),%ecx movl %eax,(%ebp) addl 12(%ebp),%edx movl %esi,4(%ebp) addl 16(%ebp),%edi movl 
%ecx,8(%ebp) movl %edx,12(%ebp) movl %edi,16(%ebp) popl %edi popl %esi popl %ebx popl %ebp ret .globl _sha1_block_data_order_avx .private_extern _sha1_block_data_order_avx .align 4 _sha1_block_data_order_avx: L_sha1_block_data_order_avx_begin: pushl %ebp pushl %ebx pushl %esi pushl %edi call L004pic_point L004pic_point: popl %ebp leal LK_XX_XX-L004pic_point(%ebp),%ebp vzeroall vmovdqa (%ebp),%xmm7 vmovdqa 16(%ebp),%xmm0 vmovdqa 32(%ebp),%xmm1 vmovdqa 48(%ebp),%xmm2 vmovdqa 64(%ebp),%xmm6 movl 20(%esp),%edi movl 24(%esp),%ebp movl 28(%esp),%edx movl %esp,%esi subl $208,%esp andl $-64,%esp vmovdqa %xmm0,112(%esp) vmovdqa %xmm1,128(%esp) vmovdqa %xmm2,144(%esp) shll $6,%edx vmovdqa %xmm7,160(%esp) addl %ebp,%edx vmovdqa %xmm6,176(%esp) addl $64,%ebp movl %edi,192(%esp) movl %ebp,196(%esp) movl %edx,200(%esp) movl %esi,204(%esp) movl (%edi),%eax movl 4(%edi),%ebx movl 8(%edi),%ecx movl 12(%edi),%edx movl 16(%edi),%edi movl %ebx,%esi vmovdqu -64(%ebp),%xmm0 vmovdqu -48(%ebp),%xmm1 vmovdqu -32(%ebp),%xmm2 vmovdqu -16(%ebp),%xmm3 vpshufb %xmm6,%xmm0,%xmm0 vpshufb %xmm6,%xmm1,%xmm1 vpshufb %xmm6,%xmm2,%xmm2 vmovdqa %xmm7,96(%esp) vpshufb %xmm6,%xmm3,%xmm3 vpaddd %xmm7,%xmm0,%xmm4 vpaddd %xmm7,%xmm1,%xmm5 vpaddd %xmm7,%xmm2,%xmm6 vmovdqa %xmm4,(%esp) movl %ecx,%ebp vmovdqa %xmm5,16(%esp) xorl %edx,%ebp vmovdqa %xmm6,32(%esp) andl %ebp,%esi jmp L005loop .align 4,0x90 L005loop: shrdl $2,%ebx,%ebx xorl %edx,%esi vpalignr $8,%xmm0,%xmm1,%xmm4 movl %eax,%ebp addl (%esp),%edi vpaddd %xmm3,%xmm7,%xmm7 vmovdqa %xmm0,64(%esp) xorl %ecx,%ebx shldl $5,%eax,%eax vpsrldq $4,%xmm3,%xmm6 addl %esi,%edi andl %ebx,%ebp vpxor %xmm0,%xmm4,%xmm4 xorl %ecx,%ebx addl %eax,%edi vpxor %xmm2,%xmm6,%xmm6 shrdl $7,%eax,%eax xorl %ecx,%ebp vmovdqa %xmm7,48(%esp) movl %edi,%esi addl 4(%esp),%edx vpxor %xmm6,%xmm4,%xmm4 xorl %ebx,%eax shldl $5,%edi,%edi addl %ebp,%edx andl %eax,%esi vpsrld $31,%xmm4,%xmm6 xorl %ebx,%eax addl %edi,%edx shrdl $7,%edi,%edi xorl %ebx,%esi vpslldq $12,%xmm4,%xmm0 vpaddd %xmm4,%xmm4,%xmm4 movl %edx,%ebp addl 8(%esp),%ecx xorl %eax,%edi shldl $5,%edx,%edx vpsrld $30,%xmm0,%xmm7 vpor %xmm6,%xmm4,%xmm4 addl %esi,%ecx andl %edi,%ebp xorl %eax,%edi addl %edx,%ecx vpslld $2,%xmm0,%xmm0 shrdl $7,%edx,%edx xorl %eax,%ebp vpxor %xmm7,%xmm4,%xmm4 movl %ecx,%esi addl 12(%esp),%ebx xorl %edi,%edx shldl $5,%ecx,%ecx vpxor %xmm0,%xmm4,%xmm4 addl %ebp,%ebx andl %edx,%esi vmovdqa 96(%esp),%xmm0 xorl %edi,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %edi,%esi vpalignr $8,%xmm1,%xmm2,%xmm5 movl %ebx,%ebp addl 16(%esp),%eax vpaddd %xmm4,%xmm0,%xmm0 vmovdqa %xmm1,80(%esp) xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrldq $4,%xmm4,%xmm7 addl %esi,%eax andl %ecx,%ebp vpxor %xmm1,%xmm5,%xmm5 xorl %edx,%ecx addl %ebx,%eax vpxor %xmm3,%xmm7,%xmm7 shrdl $7,%ebx,%ebx xorl %edx,%ebp vmovdqa %xmm0,(%esp) movl %eax,%esi addl 20(%esp),%edi vpxor %xmm7,%xmm5,%xmm5 xorl %ecx,%ebx shldl $5,%eax,%eax addl %ebp,%edi andl %ebx,%esi vpsrld $31,%xmm5,%xmm7 xorl %ecx,%ebx addl %eax,%edi shrdl $7,%eax,%eax xorl %ecx,%esi vpslldq $12,%xmm5,%xmm1 vpaddd %xmm5,%xmm5,%xmm5 movl %edi,%ebp addl 24(%esp),%edx xorl %ebx,%eax shldl $5,%edi,%edi vpsrld $30,%xmm1,%xmm0 vpor %xmm7,%xmm5,%xmm5 addl %esi,%edx andl %eax,%ebp xorl %ebx,%eax addl %edi,%edx vpslld $2,%xmm1,%xmm1 shrdl $7,%edi,%edi xorl %ebx,%ebp vpxor %xmm0,%xmm5,%xmm5 movl %edx,%esi addl 28(%esp),%ecx xorl %eax,%edi shldl $5,%edx,%edx vpxor %xmm1,%xmm5,%xmm5 addl %ebp,%ecx andl %edi,%esi vmovdqa 112(%esp),%xmm1 xorl %eax,%edi addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi vpalignr 
$8,%xmm2,%xmm3,%xmm6 movl %ecx,%ebp addl 32(%esp),%ebx vpaddd %xmm5,%xmm1,%xmm1 vmovdqa %xmm2,96(%esp) xorl %edi,%edx shldl $5,%ecx,%ecx vpsrldq $4,%xmm5,%xmm0 addl %esi,%ebx andl %edx,%ebp vpxor %xmm2,%xmm6,%xmm6 xorl %edi,%edx addl %ecx,%ebx vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%ecx,%ecx xorl %edi,%ebp vmovdqa %xmm1,16(%esp) movl %ebx,%esi addl 36(%esp),%eax vpxor %xmm0,%xmm6,%xmm6 xorl %edx,%ecx shldl $5,%ebx,%ebx addl %ebp,%eax andl %ecx,%esi vpsrld $31,%xmm6,%xmm0 xorl %edx,%ecx addl %ebx,%eax shrdl $7,%ebx,%ebx xorl %edx,%esi vpslldq $12,%xmm6,%xmm2 vpaddd %xmm6,%xmm6,%xmm6 movl %eax,%ebp addl 40(%esp),%edi xorl %ecx,%ebx shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm1 vpor %xmm0,%xmm6,%xmm6 addl %esi,%edi andl %ebx,%ebp xorl %ecx,%ebx addl %eax,%edi vpslld $2,%xmm2,%xmm2 vmovdqa 64(%esp),%xmm0 shrdl $7,%eax,%eax xorl %ecx,%ebp vpxor %xmm1,%xmm6,%xmm6 movl %edi,%esi addl 44(%esp),%edx xorl %ebx,%eax shldl $5,%edi,%edi vpxor %xmm2,%xmm6,%xmm6 addl %ebp,%edx andl %eax,%esi vmovdqa 112(%esp),%xmm2 xorl %ebx,%eax addl %edi,%edx shrdl $7,%edi,%edi xorl %ebx,%esi vpalignr $8,%xmm3,%xmm4,%xmm7 movl %edx,%ebp addl 48(%esp),%ecx vpaddd %xmm6,%xmm2,%xmm2 vmovdqa %xmm3,64(%esp) xorl %eax,%edi shldl $5,%edx,%edx vpsrldq $4,%xmm6,%xmm1 addl %esi,%ecx andl %edi,%ebp vpxor %xmm3,%xmm7,%xmm7 xorl %eax,%edi addl %edx,%ecx vpxor %xmm5,%xmm1,%xmm1 shrdl $7,%edx,%edx xorl %eax,%ebp vmovdqa %xmm2,32(%esp) movl %ecx,%esi addl 52(%esp),%ebx vpxor %xmm1,%xmm7,%xmm7 xorl %edi,%edx shldl $5,%ecx,%ecx addl %ebp,%ebx andl %edx,%esi vpsrld $31,%xmm7,%xmm1 xorl %edi,%edx addl %ecx,%ebx shrdl $7,%ecx,%ecx xorl %edi,%esi vpslldq $12,%xmm7,%xmm3 vpaddd %xmm7,%xmm7,%xmm7 movl %ebx,%ebp addl 56(%esp),%eax xorl %edx,%ecx shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm2 vpor %xmm1,%xmm7,%xmm7 addl %esi,%eax andl %ecx,%ebp xorl %edx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 vmovdqa 80(%esp),%xmm1 shrdl $7,%ebx,%ebx xorl %edx,%ebp vpxor %xmm2,%xmm7,%xmm7 movl %eax,%esi addl 60(%esp),%edi xorl %ecx,%ebx shldl $5,%eax,%eax vpxor %xmm3,%xmm7,%xmm7 addl %ebp,%edi andl %ebx,%esi vmovdqa 112(%esp),%xmm3 xorl %ecx,%ebx addl %eax,%edi vpalignr $8,%xmm6,%xmm7,%xmm2 vpxor %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax xorl %ecx,%esi movl %edi,%ebp addl (%esp),%edx vpxor %xmm1,%xmm0,%xmm0 vmovdqa %xmm4,80(%esp) xorl %ebx,%eax shldl $5,%edi,%edi vmovdqa %xmm3,%xmm4 vpaddd %xmm7,%xmm3,%xmm3 addl %esi,%edx andl %eax,%ebp vpxor %xmm2,%xmm0,%xmm0 xorl %ebx,%eax addl %edi,%edx shrdl $7,%edi,%edi xorl %ebx,%ebp vpsrld $30,%xmm0,%xmm2 vmovdqa %xmm3,48(%esp) movl %edx,%esi addl 4(%esp),%ecx xorl %eax,%edi shldl $5,%edx,%edx vpslld $2,%xmm0,%xmm0 addl %ebp,%ecx andl %edi,%esi xorl %eax,%edi addl %edx,%ecx shrdl $7,%edx,%edx xorl %eax,%esi movl %ecx,%ebp addl 8(%esp),%ebx vpor %xmm2,%xmm0,%xmm0 xorl %edi,%edx shldl $5,%ecx,%ecx vmovdqa 96(%esp),%xmm2 addl %esi,%ebx andl %edx,%ebp xorl %edi,%edx addl %ecx,%ebx addl 12(%esp),%eax xorl %edi,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx addl %ebp,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpalignr $8,%xmm7,%xmm0,%xmm3 vpxor %xmm5,%xmm1,%xmm1 addl 16(%esp),%edi xorl %ecx,%esi movl %eax,%ebp shldl $5,%eax,%eax vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm5,96(%esp) addl %esi,%edi xorl %ecx,%ebp vmovdqa %xmm4,%xmm5 vpaddd %xmm0,%xmm4,%xmm4 shrdl $7,%ebx,%ebx addl %eax,%edi vpxor %xmm3,%xmm1,%xmm1 addl 20(%esp),%edx xorl %ebx,%ebp movl %edi,%esi shldl $5,%edi,%edi vpsrld $30,%xmm1,%xmm3 vmovdqa %xmm4,(%esp) addl %ebp,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %edi,%edx vpslld $2,%xmm1,%xmm1 addl 24(%esp),%ecx xorl %eax,%esi movl 
%edx,%ebp shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%ebp shrdl $7,%edi,%edi addl %edx,%ecx vpor %xmm3,%xmm1,%xmm1 addl 28(%esp),%ebx xorl %edi,%ebp vmovdqa 64(%esp),%xmm3 movl %ecx,%esi shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edi,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpalignr $8,%xmm0,%xmm1,%xmm4 vpxor %xmm6,%xmm2,%xmm2 addl 32(%esp),%eax xorl %edx,%esi movl %ebx,%ebp shldl $5,%ebx,%ebx vpxor %xmm3,%xmm2,%xmm2 vmovdqa %xmm6,64(%esp) addl %esi,%eax xorl %edx,%ebp vmovdqa 128(%esp),%xmm6 vpaddd %xmm1,%xmm5,%xmm5 shrdl $7,%ecx,%ecx addl %ebx,%eax vpxor %xmm4,%xmm2,%xmm2 addl 36(%esp),%edi xorl %ecx,%ebp movl %eax,%esi shldl $5,%eax,%eax vpsrld $30,%xmm2,%xmm4 vmovdqa %xmm5,16(%esp) addl %ebp,%edi xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%edi vpslld $2,%xmm2,%xmm2 addl 40(%esp),%edx xorl %ebx,%esi movl %edi,%ebp shldl $5,%edi,%edi addl %esi,%edx xorl %ebx,%ebp shrdl $7,%eax,%eax addl %edi,%edx vpor %xmm4,%xmm2,%xmm2 addl 44(%esp),%ecx xorl %eax,%ebp vmovdqa 80(%esp),%xmm4 movl %edx,%esi shldl $5,%edx,%edx addl %ebp,%ecx xorl %eax,%esi shrdl $7,%edi,%edi addl %edx,%ecx vpalignr $8,%xmm1,%xmm2,%xmm5 vpxor %xmm7,%xmm3,%xmm3 addl 48(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp shldl $5,%ecx,%ecx vpxor %xmm4,%xmm3,%xmm3 vmovdqa %xmm7,80(%esp) addl %esi,%ebx xorl %edi,%ebp vmovdqa %xmm6,%xmm7 vpaddd %xmm2,%xmm6,%xmm6 shrdl $7,%edx,%edx addl %ecx,%ebx vpxor %xmm5,%xmm3,%xmm3 addl 52(%esp),%eax xorl %edx,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx vpsrld $30,%xmm3,%xmm5 vmovdqa %xmm6,32(%esp) addl %ebp,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax vpslld $2,%xmm3,%xmm3 addl 56(%esp),%edi xorl %ecx,%esi movl %eax,%ebp shldl $5,%eax,%eax addl %esi,%edi xorl %ecx,%ebp shrdl $7,%ebx,%ebx addl %eax,%edi vpor %xmm5,%xmm3,%xmm3 addl 60(%esp),%edx xorl %ebx,%ebp vmovdqa 96(%esp),%xmm5 movl %edi,%esi shldl $5,%edi,%edi addl %ebp,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %edi,%edx vpalignr $8,%xmm2,%xmm3,%xmm6 vpxor %xmm0,%xmm4,%xmm4 addl (%esp),%ecx xorl %eax,%esi movl %edx,%ebp shldl $5,%edx,%edx vpxor %xmm5,%xmm4,%xmm4 vmovdqa %xmm0,96(%esp) addl %esi,%ecx xorl %eax,%ebp vmovdqa %xmm7,%xmm0 vpaddd %xmm3,%xmm7,%xmm7 shrdl $7,%edi,%edi addl %edx,%ecx vpxor %xmm6,%xmm4,%xmm4 addl 4(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi shldl $5,%ecx,%ecx vpsrld $30,%xmm4,%xmm6 vmovdqa %xmm7,48(%esp) addl %ebp,%ebx xorl %edi,%esi shrdl $7,%edx,%edx addl %ecx,%ebx vpslld $2,%xmm4,%xmm4 addl 8(%esp),%eax xorl %edx,%esi movl %ebx,%ebp shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%ebp shrdl $7,%ecx,%ecx addl %ebx,%eax vpor %xmm6,%xmm4,%xmm4 addl 12(%esp),%edi xorl %ecx,%ebp vmovdqa 64(%esp),%xmm6 movl %eax,%esi shldl $5,%eax,%eax addl %ebp,%edi xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%edi vpalignr $8,%xmm3,%xmm4,%xmm7 vpxor %xmm1,%xmm5,%xmm5 addl 16(%esp),%edx xorl %ebx,%esi movl %edi,%ebp shldl $5,%edi,%edi vpxor %xmm6,%xmm5,%xmm5 vmovdqa %xmm1,64(%esp) addl %esi,%edx xorl %ebx,%ebp vmovdqa %xmm0,%xmm1 vpaddd %xmm4,%xmm0,%xmm0 shrdl $7,%eax,%eax addl %edi,%edx vpxor %xmm7,%xmm5,%xmm5 addl 20(%esp),%ecx xorl %eax,%ebp movl %edx,%esi shldl $5,%edx,%edx vpsrld $30,%xmm5,%xmm7 vmovdqa %xmm0,(%esp) addl %ebp,%ecx xorl %eax,%esi shrdl $7,%edi,%edi addl %edx,%ecx vpslld $2,%xmm5,%xmm5 addl 24(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edi,%ebp shrdl $7,%edx,%edx addl %ecx,%ebx vpor %xmm7,%xmm5,%xmm5 addl 28(%esp),%eax vmovdqa 80(%esp),%xmm7 shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%ebp shldl $5,%ebx,%ebx addl %ebp,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax vpalignr 
$8,%xmm4,%xmm5,%xmm0 vpxor %xmm2,%xmm6,%xmm6 addl 32(%esp),%edi andl %ecx,%esi xorl %edx,%ecx shrdl $7,%ebx,%ebx vpxor %xmm7,%xmm6,%xmm6 vmovdqa %xmm2,80(%esp) movl %eax,%ebp xorl %ecx,%esi vmovdqa %xmm1,%xmm2 vpaddd %xmm5,%xmm1,%xmm1 shldl $5,%eax,%eax addl %esi,%edi vpxor %xmm0,%xmm6,%xmm6 xorl %ebx,%ebp xorl %ecx,%ebx addl %eax,%edi addl 36(%esp),%edx vpsrld $30,%xmm6,%xmm0 vmovdqa %xmm1,16(%esp) andl %ebx,%ebp xorl %ecx,%ebx shrdl $7,%eax,%eax movl %edi,%esi vpslld $2,%xmm6,%xmm6 xorl %ebx,%ebp shldl $5,%edi,%edi addl %ebp,%edx xorl %eax,%esi xorl %ebx,%eax addl %edi,%edx addl 40(%esp),%ecx andl %eax,%esi vpor %xmm0,%xmm6,%xmm6 xorl %ebx,%eax shrdl $7,%edi,%edi vmovdqa 96(%esp),%xmm0 movl %edx,%ebp xorl %eax,%esi shldl $5,%edx,%edx addl %esi,%ecx xorl %edi,%ebp xorl %eax,%edi addl %edx,%ecx addl 44(%esp),%ebx andl %edi,%ebp xorl %eax,%edi shrdl $7,%edx,%edx movl %ecx,%esi xorl %edi,%ebp shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edx,%esi xorl %edi,%edx addl %ecx,%ebx vpalignr $8,%xmm5,%xmm6,%xmm1 vpxor %xmm3,%xmm7,%xmm7 addl 48(%esp),%eax andl %edx,%esi xorl %edi,%edx shrdl $7,%ecx,%ecx vpxor %xmm0,%xmm7,%xmm7 vmovdqa %xmm3,96(%esp) movl %ebx,%ebp xorl %edx,%esi vmovdqa 144(%esp),%xmm3 vpaddd %xmm6,%xmm2,%xmm2 shldl $5,%ebx,%ebx addl %esi,%eax vpxor %xmm1,%xmm7,%xmm7 xorl %ecx,%ebp xorl %edx,%ecx addl %ebx,%eax addl 52(%esp),%edi vpsrld $30,%xmm7,%xmm1 vmovdqa %xmm2,32(%esp) andl %ecx,%ebp xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi vpslld $2,%xmm7,%xmm7 xorl %ecx,%ebp shldl $5,%eax,%eax addl %ebp,%edi xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%edi addl 56(%esp),%edx andl %ebx,%esi vpor %xmm1,%xmm7,%xmm7 xorl %ecx,%ebx shrdl $7,%eax,%eax vmovdqa 64(%esp),%xmm1 movl %edi,%ebp xorl %ebx,%esi shldl $5,%edi,%edi addl %esi,%edx xorl %eax,%ebp xorl %ebx,%eax addl %edi,%edx addl 60(%esp),%ecx andl %eax,%ebp xorl %ebx,%eax shrdl $7,%edi,%edi movl %edx,%esi xorl %eax,%ebp shldl $5,%edx,%edx addl %ebp,%ecx xorl %edi,%esi xorl %eax,%edi addl %edx,%ecx vpalignr $8,%xmm6,%xmm7,%xmm2 vpxor %xmm4,%xmm0,%xmm0 addl (%esp),%ebx andl %edi,%esi xorl %eax,%edi shrdl $7,%edx,%edx vpxor %xmm1,%xmm0,%xmm0 vmovdqa %xmm4,64(%esp) movl %ecx,%ebp xorl %edi,%esi vmovdqa %xmm3,%xmm4 vpaddd %xmm7,%xmm3,%xmm3 shldl $5,%ecx,%ecx addl %esi,%ebx vpxor %xmm2,%xmm0,%xmm0 xorl %edx,%ebp xorl %edi,%edx addl %ecx,%ebx addl 4(%esp),%eax vpsrld $30,%xmm0,%xmm2 vmovdqa %xmm3,48(%esp) andl %edx,%ebp xorl %edi,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi vpslld $2,%xmm0,%xmm0 xorl %edx,%ebp shldl $5,%ebx,%ebx addl %ebp,%eax xorl %ecx,%esi xorl %edx,%ecx addl %ebx,%eax addl 8(%esp),%edi andl %ecx,%esi vpor %xmm2,%xmm0,%xmm0 xorl %edx,%ecx shrdl $7,%ebx,%ebx vmovdqa 80(%esp),%xmm2 movl %eax,%ebp xorl %ecx,%esi shldl $5,%eax,%eax addl %esi,%edi xorl %ebx,%ebp xorl %ecx,%ebx addl %eax,%edi addl 12(%esp),%edx andl %ebx,%ebp xorl %ecx,%ebx shrdl $7,%eax,%eax movl %edi,%esi xorl %ebx,%ebp shldl $5,%edi,%edi addl %ebp,%edx xorl %eax,%esi xorl %ebx,%eax addl %edi,%edx vpalignr $8,%xmm7,%xmm0,%xmm3 vpxor %xmm5,%xmm1,%xmm1 addl 16(%esp),%ecx andl %eax,%esi xorl %ebx,%eax shrdl $7,%edi,%edi vpxor %xmm2,%xmm1,%xmm1 vmovdqa %xmm5,80(%esp) movl %edx,%ebp xorl %eax,%esi vmovdqa %xmm4,%xmm5 vpaddd %xmm0,%xmm4,%xmm4 shldl $5,%edx,%edx addl %esi,%ecx vpxor %xmm3,%xmm1,%xmm1 xorl %edi,%ebp xorl %eax,%edi addl %edx,%ecx addl 20(%esp),%ebx vpsrld $30,%xmm1,%xmm3 vmovdqa %xmm4,(%esp) andl %edi,%ebp xorl %eax,%edi shrdl $7,%edx,%edx movl %ecx,%esi vpslld $2,%xmm1,%xmm1 xorl %edi,%ebp shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edx,%esi xorl %edi,%edx addl 
%ecx,%ebx addl 24(%esp),%eax andl %edx,%esi vpor %xmm3,%xmm1,%xmm1 xorl %edi,%edx shrdl $7,%ecx,%ecx vmovdqa 96(%esp),%xmm3 movl %ebx,%ebp xorl %edx,%esi shldl $5,%ebx,%ebx addl %esi,%eax xorl %ecx,%ebp xorl %edx,%ecx addl %ebx,%eax addl 28(%esp),%edi andl %ecx,%ebp xorl %edx,%ecx shrdl $7,%ebx,%ebx movl %eax,%esi xorl %ecx,%ebp shldl $5,%eax,%eax addl %ebp,%edi xorl %ebx,%esi xorl %ecx,%ebx addl %eax,%edi vpalignr $8,%xmm0,%xmm1,%xmm4 vpxor %xmm6,%xmm2,%xmm2 addl 32(%esp),%edx andl %ebx,%esi xorl %ecx,%ebx shrdl $7,%eax,%eax vpxor %xmm3,%xmm2,%xmm2 vmovdqa %xmm6,96(%esp) movl %edi,%ebp xorl %ebx,%esi vmovdqa %xmm5,%xmm6 vpaddd %xmm1,%xmm5,%xmm5 shldl $5,%edi,%edi addl %esi,%edx vpxor %xmm4,%xmm2,%xmm2 xorl %eax,%ebp xorl %ebx,%eax addl %edi,%edx addl 36(%esp),%ecx vpsrld $30,%xmm2,%xmm4 vmovdqa %xmm5,16(%esp) andl %eax,%ebp xorl %ebx,%eax shrdl $7,%edi,%edi movl %edx,%esi vpslld $2,%xmm2,%xmm2 xorl %eax,%ebp shldl $5,%edx,%edx addl %ebp,%ecx xorl %edi,%esi xorl %eax,%edi addl %edx,%ecx addl 40(%esp),%ebx andl %edi,%esi vpor %xmm4,%xmm2,%xmm2 xorl %eax,%edi shrdl $7,%edx,%edx vmovdqa 64(%esp),%xmm4 movl %ecx,%ebp xorl %edi,%esi shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edx,%ebp xorl %edi,%edx addl %ecx,%ebx addl 44(%esp),%eax andl %edx,%ebp xorl %edi,%edx shrdl $7,%ecx,%ecx movl %ebx,%esi xorl %edx,%ebp shldl $5,%ebx,%ebx addl %ebp,%eax xorl %edx,%esi addl %ebx,%eax vpalignr $8,%xmm1,%xmm2,%xmm5 vpxor %xmm7,%xmm3,%xmm3 addl 48(%esp),%edi xorl %ecx,%esi movl %eax,%ebp shldl $5,%eax,%eax vpxor %xmm4,%xmm3,%xmm3 vmovdqa %xmm7,64(%esp) addl %esi,%edi xorl %ecx,%ebp vmovdqa %xmm6,%xmm7 vpaddd %xmm2,%xmm6,%xmm6 shrdl $7,%ebx,%ebx addl %eax,%edi vpxor %xmm5,%xmm3,%xmm3 addl 52(%esp),%edx xorl %ebx,%ebp movl %edi,%esi shldl $5,%edi,%edi vpsrld $30,%xmm3,%xmm5 vmovdqa %xmm6,32(%esp) addl %ebp,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %edi,%edx vpslld $2,%xmm3,%xmm3 addl 56(%esp),%ecx xorl %eax,%esi movl %edx,%ebp shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%ebp shrdl $7,%edi,%edi addl %edx,%ecx vpor %xmm5,%xmm3,%xmm3 addl 60(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edi,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl (%esp),%eax vpaddd %xmm3,%xmm7,%xmm7 xorl %edx,%esi movl %ebx,%ebp shldl $5,%ebx,%ebx addl %esi,%eax vmovdqa %xmm7,48(%esp) xorl %edx,%ebp shrdl $7,%ecx,%ecx addl %ebx,%eax addl 4(%esp),%edi xorl %ecx,%ebp movl %eax,%esi shldl $5,%eax,%eax addl %ebp,%edi xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%edi addl 8(%esp),%edx xorl %ebx,%esi movl %edi,%ebp shldl $5,%edi,%edi addl %esi,%edx xorl %ebx,%ebp shrdl $7,%eax,%eax addl %edi,%edx addl 12(%esp),%ecx xorl %eax,%ebp movl %edx,%esi shldl $5,%edx,%edx addl %ebp,%ecx xorl %eax,%esi shrdl $7,%edi,%edi addl %edx,%ecx movl 196(%esp),%ebp cmpl 200(%esp),%ebp je L006done vmovdqa 160(%esp),%xmm7 vmovdqa 176(%esp),%xmm6 vmovdqu (%ebp),%xmm0 vmovdqu 16(%ebp),%xmm1 vmovdqu 32(%ebp),%xmm2 vmovdqu 48(%ebp),%xmm3 addl $64,%ebp vpshufb %xmm6,%xmm0,%xmm0 movl %ebp,196(%esp) vmovdqa %xmm7,96(%esp) addl 16(%esp),%ebx xorl %edi,%esi vpshufb %xmm6,%xmm1,%xmm1 movl %ecx,%ebp shldl $5,%ecx,%ecx vpaddd %xmm7,%xmm0,%xmm4 addl %esi,%ebx xorl %edi,%ebp shrdl $7,%edx,%edx addl %ecx,%ebx vmovdqa %xmm4,(%esp) addl 20(%esp),%eax xorl %edx,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx addl %ebp,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%esp),%edi xorl %ecx,%esi movl %eax,%ebp shldl $5,%eax,%eax addl %esi,%edi xorl %ecx,%ebp shrdl $7,%ebx,%ebx addl %eax,%edi addl 28(%esp),%edx xorl %ebx,%ebp movl %edi,%esi shldl 
$5,%edi,%edi addl %ebp,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %edi,%edx addl 32(%esp),%ecx xorl %eax,%esi vpshufb %xmm6,%xmm2,%xmm2 movl %edx,%ebp shldl $5,%edx,%edx vpaddd %xmm7,%xmm1,%xmm5 addl %esi,%ecx xorl %eax,%ebp shrdl $7,%edi,%edi addl %edx,%ecx vmovdqa %xmm5,16(%esp) addl 36(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edi,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%esp),%eax xorl %edx,%esi movl %ebx,%ebp shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%ebp shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%esp),%edi xorl %ecx,%ebp movl %eax,%esi shldl $5,%eax,%eax addl %ebp,%edi xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%edi addl 48(%esp),%edx xorl %ebx,%esi vpshufb %xmm6,%xmm3,%xmm3 movl %edi,%ebp shldl $5,%edi,%edi vpaddd %xmm7,%xmm2,%xmm6 addl %esi,%edx xorl %ebx,%ebp shrdl $7,%eax,%eax addl %edi,%edx vmovdqa %xmm6,32(%esp) addl 52(%esp),%ecx xorl %eax,%ebp movl %edx,%esi shldl $5,%edx,%edx addl %ebp,%ecx xorl %eax,%esi shrdl $7,%edi,%edi addl %edx,%ecx addl 56(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edi,%ebp shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%esp),%eax xorl %edx,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx addl %ebp,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax movl 192(%esp),%ebp addl (%ebp),%eax addl 4(%ebp),%esi addl 8(%ebp),%ecx movl %eax,(%ebp) addl 12(%ebp),%edx movl %esi,4(%ebp) addl 16(%ebp),%edi movl %ecx,%ebx movl %ecx,8(%ebp) xorl %edx,%ebx movl %edx,12(%ebp) movl %edi,16(%ebp) movl %esi,%ebp andl %ebx,%esi movl %ebp,%ebx jmp L005loop .align 4,0x90 L006done: addl 16(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edi,%ebp shrdl $7,%edx,%edx addl %ecx,%ebx addl 20(%esp),%eax xorl %edx,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx addl %ebp,%eax xorl %edx,%esi shrdl $7,%ecx,%ecx addl %ebx,%eax addl 24(%esp),%edi xorl %ecx,%esi movl %eax,%ebp shldl $5,%eax,%eax addl %esi,%edi xorl %ecx,%ebp shrdl $7,%ebx,%ebx addl %eax,%edi addl 28(%esp),%edx xorl %ebx,%ebp movl %edi,%esi shldl $5,%edi,%edi addl %ebp,%edx xorl %ebx,%esi shrdl $7,%eax,%eax addl %edi,%edx addl 32(%esp),%ecx xorl %eax,%esi movl %edx,%ebp shldl $5,%edx,%edx addl %esi,%ecx xorl %eax,%ebp shrdl $7,%edi,%edi addl %edx,%ecx addl 36(%esp),%ebx xorl %edi,%ebp movl %ecx,%esi shldl $5,%ecx,%ecx addl %ebp,%ebx xorl %edi,%esi shrdl $7,%edx,%edx addl %ecx,%ebx addl 40(%esp),%eax xorl %edx,%esi movl %ebx,%ebp shldl $5,%ebx,%ebx addl %esi,%eax xorl %edx,%ebp shrdl $7,%ecx,%ecx addl %ebx,%eax addl 44(%esp),%edi xorl %ecx,%ebp movl %eax,%esi shldl $5,%eax,%eax addl %ebp,%edi xorl %ecx,%esi shrdl $7,%ebx,%ebx addl %eax,%edi addl 48(%esp),%edx xorl %ebx,%esi movl %edi,%ebp shldl $5,%edi,%edi addl %esi,%edx xorl %ebx,%ebp shrdl $7,%eax,%eax addl %edi,%edx addl 52(%esp),%ecx xorl %eax,%ebp movl %edx,%esi shldl $5,%edx,%edx addl %ebp,%ecx xorl %eax,%esi shrdl $7,%edi,%edi addl %edx,%ecx addl 56(%esp),%ebx xorl %edi,%esi movl %ecx,%ebp shldl $5,%ecx,%ecx addl %esi,%ebx xorl %edi,%ebp shrdl $7,%edx,%edx addl %ecx,%ebx addl 60(%esp),%eax xorl %edx,%ebp movl %ebx,%esi shldl $5,%ebx,%ebx addl %ebp,%eax shrdl $7,%ecx,%ecx addl %ebx,%eax vzeroall movl 192(%esp),%ebp addl (%ebp),%eax movl 204(%esp),%esp addl 4(%ebp),%esi addl 8(%ebp),%ecx movl %eax,(%ebp) addl 12(%ebp),%edx movl %esi,4(%ebp) addl 16(%ebp),%edi movl %ecx,8(%ebp) movl %edx,12(%ebp) movl %edi,16(%ebp) popl %edi popl %esi popl %ebx popl %ebp ret .align 6,0x90 LK_XX_XX: .long 1518500249,1518500249,1518500249,1518500249 .long 1859775393,1859775393,1859775393,1859775393 .long 
2400959708,2400959708,2400959708,2400959708 .long 3395469782,3395469782,3395469782,3395469782 .long 66051,67438087,134810123,202182159 .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115 .byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82 .byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112 .byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 #endif // !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__)
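For reference, the unrolled `# 20_39` / `# 40_59` blocks above and the LK_XX_XX table (1518500249, 1859775393, 2400959708, 3395469782, i.e. the standard SHA-1 round constants 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6) all repeat one SHA-1 step. The following is only a minimal plain-C sketch of that step, added for orientation; the names are illustrative and it is not part of the generated file.

    #include <stdint.h>

    /* Illustrative only: one generic SHA-1 round step. The assembly above
     * unrolls 80 of these per 64-byte block, keeps the 16-word message
     * schedule on the stack at (%esp), and folds the round constant into
     * the leal instruction. */
    static uint32_t rotl32(uint32_t x, unsigned n) {
        return (x << n) | (x >> (32 - n));
    }

    static void sha1_round(unsigned t, uint32_t w, uint32_t *a, uint32_t *b,
                           uint32_t *c, uint32_t *d, uint32_t *e) {
        uint32_t f, k;
        if (t < 20)      { f = (*b & *c) | (~*b & *d);            k = 0x5A827999; }
        else if (t < 40) { f = *b ^ *c ^ *d;                      k = 0x6ED9EBA1; }
        else if (t < 60) { f = (*b & *c) | (*b & *d) | (*c & *d); k = 0x8F1BBCDC; }
        else             { f = *b ^ *c ^ *d;                      k = 0xCA62C1D6; }

        uint32_t tmp = rotl32(*a, 5) + f + *e + k + w;
        *e = *d;
        *d = *c;
        *c = rotl32(*b, 30);
        *b = *a;
        *a = tmp;
    }

The xorl/roll $1 sequences in the scalar path compute the schedule recurrence w[t] = rotl32(w[t-3] ^ w[t-8] ^ w[t-14] ^ w[t-16], 1) in place on the stack; the SSSE3 and AVX paths below it evaluate the same recurrence four lanes at a time with pxor/vpalignr and the broadcast constants from LK_XX_XX.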
marvin-hansen/iggy-streaming-system
15,187
thirdparty/crates/aws-lc-sys-0.25.1/aws-lc/generated-src/mac-x86/crypto/fipsmodule/vpaes-x86.S
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.
#include <openssl/asm_base.h>
#if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__)
.text
#ifdef BORINGSSL_DISPATCH_TEST
#endif
.align 6,0x90
L_vpaes_consts:
.long 218628480,235210255,168496130,67568393
.long 252381056,17041926,33884169,51187212
.long 252645135,252645135,252645135,252645135
.long 1512730624,3266504856,1377990664,3401244816
.long 830229760,1275146365,2969422977,3447763452
.long 3411033600,2979783055,338359620,2782886510
.long 4209124096,907596821,221174255,1006095553
.long 191964160,3799684038,3164090317,1589111125
.long 182528256,1777043520,2877432650,3265356744
.long 1874708224,3503451415,3305285752,363511674
.long 1606117888,3487855781,1093350906,2384367825
.long 197121,67569157,134941193,202313229
.long 67569157,134941193,202313229,197121
.long 134941193,202313229,197121,67569157
.long 202313229,197121,67569157,134941193
.long 33619971,100992007,168364043,235736079
.long 235736079,33619971,100992007,168364043
.long 168364043,235736079,33619971,100992007
.long 100992007,168364043,235736079,33619971
.long 50462976,117835012,185207048,252579084
.long 252314880,51251460,117574920,184942860
.long 184682752,252054788,50987272,118359308
.long 118099200,185467140,251790600,50727180
.long 2946363062,528716217,1300004225,1881839624
.long 1532713819,1532713819,1532713819,1532713819
.long 3602276352,4288629033,3737020424,4153884961
.long 1354558464,32357713,2958822624,3775749553
.long 1201988352,132424512,1572796698,503232858
.long 2213177600,1597421020,4103937655,675398315
.long 2749646592,4273543773,1511898873,121693092
.long 3040248576,1103263732,2871565598,1608280554
.long 2236667136,2588920351,482954393,64377734
.long 3069987328,291237287,2117370568,3650299247
.long 533321216,3573750986,2572112006,1401264716
.long 1339849704,2721158661,548607111,3445553514
.long 2128193280,3054596040,2183486460,1257083700
.long 655635200,1165381986,3923443150,2344132524
.long 190078720,256924420,290342170,357187870
.long 1610966272,2263057382,4103205268,309794674
.long 2592527872,2233205587,1335446729,3402964816
.long 3973531904,3225098121,3002836325,1918774430
.long 3870401024,2102906079,2284471353,4117666579
.long 617007872,1021508343,366931923,691083277
.long 2528395776,3491914898,2968704004,1613121270
.long 3445188352,3247741094,844474987,4093578302
.long 651481088,1190302358,1689581232,574775300
.long 4289380608,206939853,2555985458,2489840491
.long 2130264064,327674451,3566485037,3349835193
.long 2470714624,316102159,3636825756,3393945945
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte 118,101,114,115,105,116,121,41,0
.align 6,0x90
.private_extern __vpaes_preheat
.align 4
__vpaes_preheat:
addl (%esp),%ebp
movdqa -48(%ebp),%xmm7
movdqa -16(%ebp),%xmm6
ret
.private_extern __vpaes_encrypt_core
.align 4
__vpaes_encrypt_core:
movl $16,%ecx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa (%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0
movdqu (%edx),%xmm5
.byte 102,15,56,0,208
movdqa 16(%ebp),%xmm0
pxor %xmm5,%xmm2
psrld $4,%xmm1
addl $16,%edx
.byte 102,15,56,0,193
leal 192(%ebp),%ebx
pxor %xmm2,%xmm0
jmp L000enc_entry
.align 4,0x90
L001enc_loop:
movdqa 32(%ebp),%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,226
.byte 102,15,56,0,195
pxor %xmm5,%xmm4
movdqa 64(%ebp),%xmm5
pxor %xmm4,%xmm0
movdqa -64(%ebx,%ecx,1),%xmm1
.byte 102,15,56,0,234
movdqa 80(%ebp),%xmm2
movdqa (%ebx,%ecx,1),%xmm4
.byte 102,15,56,0,211
movdqa %xmm0,%xmm3
pxor %xmm5,%xmm2
.byte 102,15,56,0,193
addl $16,%edx
pxor %xmm2,%xmm0
.byte 102,15,56,0,220
addl $16,%ecx
pxor %xmm0,%xmm3
.byte 102,15,56,0,193
andl $48,%ecx
subl $1,%eax
pxor %xmm3,%xmm0
L000enc_entry:
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm5
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm6,%xmm0
.byte 102,15,56,0,232
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217
movdqa %xmm7,%xmm4
pxor %xmm5,%xmm3
.byte 102,15,56,0,224
movdqa %xmm7,%xmm2
pxor %xmm5,%xmm4
.byte 102,15,56,0,211
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220
movdqu (%edx),%xmm5
pxor %xmm1,%xmm3
jnz L001enc_loop
movdqa 96(%ebp),%xmm4
movdqa 112(%ebp),%xmm0
.byte 102,15,56,0,226
pxor %xmm5,%xmm4
.byte 102,15,56,0,195
movdqa 64(%ebx,%ecx,1),%xmm1
pxor %xmm4,%xmm0
.byte 102,15,56,0,193
ret
.private_extern __vpaes_decrypt_core
.align 4
__vpaes_decrypt_core:
leal 608(%ebp),%ebx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa -64(%ebx),%xmm2
pandn %xmm0,%xmm1
movl %eax,%ecx
psrld $4,%xmm1
movdqu (%edx),%xmm5
shll $4,%ecx
pand %xmm6,%xmm0
.byte 102,15,56,0,208
movdqa -48(%ebx),%xmm0
xorl $48,%ecx
.byte 102,15,56,0,193
andl $48,%ecx
pxor %xmm5,%xmm2
movdqa 176(%ebp),%xmm5
pxor %xmm2,%xmm0
addl $16,%edx
leal -352(%ebx,%ecx,1),%ecx
jmp L002dec_entry
.align 4,0x90
L003dec_loop:
movdqa -32(%ebx),%xmm4
movdqa -16(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa (%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 16(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa 32(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 48(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa 64(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 80(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
addl $16,%edx
.byte 102,15,58,15,237,12
pxor %xmm1,%xmm0
subl $1,%eax
L002dec_entry:
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0
psrld $4,%xmm1
.byte 102,15,56,0,208
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217
movdqa %xmm7,%xmm4
pxor %xmm2,%xmm3
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm7,%xmm2
.byte 102,15,56,0,211
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220
movdqu (%edx),%xmm0
pxor %xmm1,%xmm3
jnz L003dec_loop
movdqa 96(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 112(%ebx),%xmm0
movdqa (%ecx),%xmm2
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
.byte 102,15,56,0,194
ret
.private_extern __vpaes_schedule_core
.align 4
__vpaes_schedule_core:
addl (%esp),%ebp
movdqu (%esi),%xmm0
movdqa 320(%ebp),%xmm2
movdqa %xmm0,%xmm3
leal (%ebp),%ebx
movdqa %xmm2,4(%esp)
call __vpaes_schedule_transform
movdqa %xmm0,%xmm7
testl %edi,%edi
jnz L004schedule_am_decrypting
movdqu %xmm0,(%edx)
jmp L005schedule_go
L004schedule_am_decrypting:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
movdqu %xmm3,(%edx)
xorl $48,%ecx
L005schedule_go:
cmpl $192,%eax
ja L006schedule_256
je L007schedule_192
L008schedule_128:
movl $10,%eax
L009loop_schedule_128:
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
jmp L009loop_schedule_128
.align 4,0x90
L007schedule_192:
movdqu 8(%esi),%xmm0
call __vpaes_schedule_transform
movdqa %xmm0,%xmm6
pxor %xmm4,%xmm4
movhlps %xmm4,%xmm6
movl $4,%eax
L011loop_schedule_192:
call __vpaes_schedule_round
.byte 102,15,58,15,198,8
call __vpaes_schedule_mangle
call __vpaes_schedule_192_smear
call __vpaes_schedule_mangle
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
call __vpaes_schedule_192_smear
jmp L011loop_schedule_192
.align 4,0x90
L006schedule_256:
movdqu 16(%esi),%xmm0
call __vpaes_schedule_transform
movl $7,%eax
L012loop_schedule_256:
call __vpaes_schedule_mangle
movdqa %xmm0,%xmm6
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
pshufd $255,%xmm0,%xmm0
movdqa %xmm7,20(%esp)
movdqa %xmm6,%xmm7
call L_vpaes_schedule_low_round
movdqa 20(%esp),%xmm7
jmp L012loop_schedule_256
.align 4,0x90
L010schedule_mangle_last:
leal 384(%ebp),%ebx
testl %edi,%edi
jnz L013schedule_mangle_last_dec
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,193
leal 352(%ebp),%ebx
addl $32,%edx
L013schedule_mangle_last_dec:
addl $-16,%edx
pxor 336(%ebp),%xmm0
call __vpaes_schedule_transform
movdqu %xmm0,(%edx)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
ret
.private_extern __vpaes_schedule_192_smear
.align 4
__vpaes_schedule_192_smear:
pshufd $128,%xmm6,%xmm1
pshufd $254,%xmm7,%xmm0
pxor %xmm1,%xmm6
pxor %xmm1,%xmm1
pxor %xmm0,%xmm6
movdqa %xmm6,%xmm0
movhlps %xmm1,%xmm6
ret
.private_extern __vpaes_schedule_round
.align 4
__vpaes_schedule_round:
movdqa 8(%esp),%xmm2
pxor %xmm1,%xmm1
.byte 102,15,58,15,202,15
.byte 102,15,58,15,210,15
pxor %xmm1,%xmm7
pshufd $255,%xmm0,%xmm0
.byte 102,15,58,15,192,1
movdqa %xmm2,8(%esp)
L_vpaes_schedule_low_round:
movdqa %xmm7,%xmm1
pslldq $4,%xmm7
pxor %xmm1,%xmm7
movdqa %xmm7,%xmm1
pslldq $8,%xmm7
pxor %xmm1,%xmm7
pxor 336(%ebp),%xmm7
movdqa -16(%ebp),%xmm4
movdqa -48(%ebp),%xmm5
movdqa %xmm4,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm4,%xmm0
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208
pxor %xmm1,%xmm0
movdqa %xmm5,%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
movdqa %xmm5,%xmm4
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm5,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm5,%xmm3
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
ret
.private_extern __vpaes_schedule_transform
.align 4
__vpaes_schedule_transform:
movdqa -16(%ebp),%xmm2
movdqa %xmm2,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm0
movdqa (%ebx),%xmm2
.byte 102,15,56,0,208
movdqa 16(%ebx),%xmm0
.byte 102,15,56,0,193
pxor %xmm2,%xmm0
ret
.private_extern __vpaes_schedule_mangle
.align 4
__vpaes_schedule_mangle:
movdqa %xmm0,%xmm4
movdqa 128(%ebp),%xmm5
testl %edi,%edi
jnz L014schedule_mangle_dec
addl $16,%edx
pxor 336(%ebp),%xmm4
.byte 102,15,56,0,229
movdqa %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
jmp L015schedule_mangle_both
.align 4,0x90
L014schedule_mangle_dec:
movdqa -16(%ebp),%xmm2
leal 416(%ebp),%esi
movdqa %xmm2,%xmm1
pandn %xmm4,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm4
movdqa (%esi),%xmm2
.byte 102,15,56,0,212
movdqa 16(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 32(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 48(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 64(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 80(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 96(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 112(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
addl $-16,%edx
L015schedule_mangle_both:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
addl $-16,%ecx
andl $48,%ecx
movdqu %xmm3,(%edx)
ret
.globl _vpaes_set_encrypt_key
.private_extern _vpaes_set_encrypt_key
.align 4
_vpaes_set_encrypt_key:
L_vpaes_set_encrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
#ifdef BORINGSSL_DISPATCH_TEST
pushl %ebx
pushl %edx
call L016pic
L016pic:
popl %ebx
leal _BORINGSSL_function_hit+5-L016pic(%ebx),%ebx
movl $1,%edx
movb %dl,(%ebx)
popl %edx
popl %ebx
#endif
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%eax
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx
movl %ebx,240(%edx)
movl $48,%ecx
movl $0,%edi
leal L_vpaes_consts+0x30-L017pic_point,%ebp
call __vpaes_schedule_core
L017pic_point:
movl 48(%esp),%esp
xorl %eax,%eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_set_decrypt_key
.private_extern _vpaes_set_decrypt_key
.align 4
_vpaes_set_decrypt_key:
L_vpaes_set_decrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%eax
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx
movl %ebx,240(%edx)
shll $4,%ebx
leal 16(%edx,%ebx,1),%edx
movl $1,%edi
movl %eax,%ecx
shrl $1,%ecx
andl $32,%ecx
xorl $32,%ecx
leal L_vpaes_consts+0x30-L018pic_point,%ebp
call __vpaes_schedule_core
L018pic_point:
movl 48(%esp),%esp
xorl %eax,%eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_encrypt
.private_extern _vpaes_encrypt
.align 4
_vpaes_encrypt:
L_vpaes_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
#ifdef BORINGSSL_DISPATCH_TEST
pushl %ebx
pushl %edx
call L019pic
L019pic:
popl %ebx
leal _BORINGSSL_function_hit+4-L019pic(%ebx),%ebx
movl $1,%edx
movb %dl,(%ebx)
popl %edx
popl %ebx
#endif
leal L_vpaes_consts+0x30-L020pic_point,%ebp
call __vpaes_preheat
L020pic_point:
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%edi
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call __vpaes_encrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_decrypt
.private_extern _vpaes_decrypt
.align 4
_vpaes_decrypt:
L_vpaes_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal L_vpaes_consts+0x30-L021pic_point,%ebp
call __vpaes_preheat
L021pic_point:
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%edi
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call __vpaes_decrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_cbc_encrypt
.private_extern _vpaes_cbc_encrypt
.align 4
_vpaes_cbc_encrypt:
L_vpaes_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
subl $16,%eax
jc L022cbc_abort
leal -56(%esp),%ebx
movl 36(%esp),%ebp
andl $-16,%ebx
movl 40(%esp),%ecx
xchgl %esp,%ebx
movdqu (%ebp),%xmm1
subl %esi,%edi
movl %ebx,48(%esp)
movl %edi,(%esp)
movl %edx,4(%esp)
movl %ebp,8(%esp)
movl %eax,%edi
leal L_vpaes_consts+0x30-L023pic_point,%ebp
call __vpaes_preheat
L023pic_point:
cmpl $0,%ecx
je L024cbc_dec_loop
jmp L025cbc_enc_loop
.align 4,0x90
L025cbc_enc_loop:
movdqu (%esi),%xmm0
pxor %xmm1,%xmm0
call __vpaes_encrypt_core
movl (%esp),%ebx
movl 4(%esp),%edx
movdqa %xmm0,%xmm1
movdqu %xmm0,(%ebx,%esi,1)
leal 16(%esi),%esi
subl $16,%edi
jnc L025cbc_enc_loop
jmp L026cbc_done
.align 4,0x90
L024cbc_dec_loop:
movdqu (%esi),%xmm0
movdqa %xmm1,16(%esp)
movdqa %xmm0,32(%esp)
call __vpaes_decrypt_core
movl (%esp),%ebx
movl 4(%esp),%edx
pxor 16(%esp),%xmm0
movdqa 32(%esp),%xmm1
movdqu %xmm0,(%ebx,%esi,1)
leal 16(%esi),%esi
subl $16,%edi
jnc L024cbc_dec_loop
L026cbc_done:
movl 8(%esp),%ebx
movl 48(%esp),%esp
movdqu %xmm1,(%ebx)
L022cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
#endif // !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__)